diff --git a/.codecov.yml b/.codecov.yml index a7ed5e802..2c0cbe97e 100644 --- a/.codecov.yml +++ b/.codecov.yml @@ -11,7 +11,7 @@ coverage: project: default: # minimum coverage ratio that the commit must meet to be considered a success - target: 82% + target: 85% if_ci_failed: error only_pulls: true diff --git a/.github/workflows/base_benchmarks.yml b/.github/workflows/base_benchmarks.yml index 7e9e3446a..b2e368a2e 100644 --- a/.github/workflows/base_benchmarks.yml +++ b/.github/workflows/base_benchmarks.yml @@ -22,7 +22,7 @@ jobs: - uses: ./.github/actions/setup-hathor-env name: Setup Hathor node environment with: - python: 3.11 + python: 3.12 os: ubuntu-22.04 - name: Set env vars run: cat ./extras/benchmarking/sync_v2/.env >> $GITHUB_ENV diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index 88d8898df..f64c79d3a 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -23,7 +23,6 @@ jobs: python-impl: - python python-version: - - '3.10' - '3.11' - '3.12' steps: diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 12c57b0fa..687ec9317 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -23,7 +23,7 @@ jobs: import os import json full_matrix = { - 'python': ['3.10', '3.11', '3.12'], + 'python': ['3.11', '3.12'], # available OS's: https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idruns-on 'os': ['ubuntu-22.04', 'macos-13'], } @@ -83,6 +83,6 @@ jobs: run: poetry run make tests - name: Upload coverage uses: codecov/codecov-action@v4 - if: matrix.python == 3.11 && startsWith(matrix.os, 'ubuntu') + if: matrix.python == 3.12 && startsWith(matrix.os, 'ubuntu') env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/pr_benchmarks.yml b/.github/workflows/pr_benchmarks.yml index a8d9e9dc0..a4aa0fc1e 100644 --- a/.github/workflows/pr_benchmarks.yml +++ b/.github/workflows/pr_benchmarks.yml @@ -26,7 +26,7 @@ jobs: - 
uses: ./.github/actions/setup-hathor-env name: Setup Hathor node environment with: - python: 3.11 + python: 3.12 os: ubuntu-22.04 - name: Set env vars run: cat ./extras/benchmarking/sync_v2/.env >> $GITHUB_ENV diff --git a/.gitignore b/.gitignore index 1db0c5e78..a90f1a9ee 100644 --- a/.gitignore +++ b/.gitignore @@ -7,7 +7,7 @@ __pycache__/ docs/_build/ -.coverage +.coverage* cover/ /coverage* @@ -25,3 +25,8 @@ extras/docker/envvars # Nix .direnv/ + +keys.json + +# Pycharm +.idea diff --git a/Dockerfile b/Dockerfile index f1fa13f38..fdb14d744 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,10 +1,10 @@ -# before changing these variables, make sure the tag $PYTHON-alpine$ALPINE exists first +# before changing these variables, make sure the tag $PYTHON-slim-$DEBIAN exists first # list of valid tags hese: https://hub.docker.com/_/python -ARG PYTHON=3.11 +ARG PYTHON=3.12 ARG DEBIAN=bullseye # stage-0: copy pyproject.toml/poetry.lock and install the production set of dependencies -FROM python:$PYTHON-slim-$DEBIAN as stage-0 +FROM python:$PYTHON-slim-$DEBIAN AS stage-0 ARG PYTHON # install runtime first deps to speedup the dev deps and because layers will be reused on stage-1 RUN apt-get -qy update diff --git a/Makefile b/Makefile index eed3cee4e..bfe793566 100644 --- a/Makefile +++ b/Makefile @@ -6,6 +6,7 @@ all: check tests # testing: tests_cli = tests/cli/ +tests_nano = tests/nanocontracts/ tests/tx/test_indexes_nc_history.py tests/resources/nanocontracts/ tests_lib = $(filter-out ${tests_cli} tests/__pycache__/, $(dir $(wildcard tests/*/.))) tests_ci = extras/github/ @@ -24,6 +25,10 @@ pytest_flags = -p no:warnings --cov-report=term --cov-report=html --cov-report=x #--implicit-reexport #--no-implicit-reexport +.PHONY: tests-nano +tests-nano: + pytest --durations=10 --cov-report=html --cov=hathor/nanocontracts/ --cov-config=.coveragerc_full -p no:warnings $(tests_nano) + .PHONY: tests-cli tests-cli: pytest --durations=10 --cov=hathor/cli/ --cov-config=.coveragerc_full 
--cov-fail-under=27 -p no:warnings $(tests_cli) @@ -42,8 +47,9 @@ tests-quick: .PHONY: tests-genesis tests-genesis: - HATHOR_TEST_CONFIG_YAML='./hathor/conf/mainnet.yml' pytest tests/tx/test_genesis.py - HATHOR_TEST_CONFIG_YAML='./hathor/conf/testnet.yml' pytest tests/tx/test_genesis.py + HATHOR_TEST_CONFIG_YAML='./hathor/conf/mainnet.yml' pytest -n0 tests/tx/test_genesis.py + HATHOR_TEST_CONFIG_YAML='./hathor/conf/testnet.yml' pytest -n0 tests/tx/test_genesis.py + HATHOR_TEST_CONFIG_YAML='./hathor/conf/nano_testnet.yml' pytest -n0 tests/tx/test_genesis.py .PHONY: tests-ci tests-ci: @@ -147,3 +153,16 @@ docker-push: docker docker-push-aws: docker docker tag $(docker_tag) 769498303037.dkr.ecr.us-east-1.amazonaws.com/fullnode:$(docker_subtag) docker push 769498303037.dkr.ecr.us-east-1.amazonaws.com/fullnode:$(docker_subtag) + +# If you get errors similar to the one below, running `make fix-rocksdb` may fix the problem. +# +# Traceback (most recent call last): +# File "", line 1, in +# File "//pypoetry/virtualenvs/hathor-29FNXj3I-py3.11/lib/python3.11/site-packages/rocksdb/__init__.py", line 1, in +# from ._rocksdb import * +# ImportError: dlopen(//pypoetry/virtualenvs/hathor-29FNXj3I-py3.11/lib/python3.11/site-packages/rocksdb/_rocksdb.cpython-311-darwin.so, 0x0002): Library not loaded: /opt/homebrew/opt/rocksdb/lib/librocksdb.9.dylib +# Referenced from: //pypoetry/virtualenvs/hathor-29FNXj3I-py3.11/lib/python3.11/site-packages/rocksdb/_rocksdb.cpython-311-darwin.so +# Reason: tried: '/opt/homebrew/opt/rocksdb/lib/librocksdb.9.dylib' (no such file), '/System/Volumes/Preboot/Cryptexes/OS/opt/homebrew/opt/rocksdb/lib/librocksdb.9.dylib' (no such file), '/opt/homebrew/opt/rocksdb/lib/librocksdb.9.dylib' (no such file), '/opt/homebrew/Cellar/rocksdb/10.0.1/lib/librocksdb.9.dylib' (no such file), '/System/Volumes/Preboot/Cryptexes/OS/opt/homebrew/Cellar/rocksdb/10.0.1/lib/librocksdb.9.dylib' (no such file), '/opt/homebrew/Cellar/rocksdb/10.0.1/lib/librocksdb.9.dylib' (no 
such file) +.PHONY: fix-rocksdb +fix-rocksdb: + poetry run pip uninstall -y rocksdb && poetry run pip install --no-binary :all: git+https://github.com/hathornetwork/python-rocksdb.git diff --git a/docs/event-queue-feature.md b/docs/event-queue-feature.md index 81025e740..0f564391f 100644 --- a/docs/event-queue-feature.md +++ b/docs/event-queue-feature.md @@ -20,21 +20,19 @@ To enable the Event Queue feature, you must add this CLI option when running the For example: ```bash -poetry run hathor-cli run_node --memory-storage --status 8080 --testnet --enable-event-queue +poetry run hathor-cli run_node --temp-data --status 8080 --testnet --enable-event-queue ``` ### First run -If this is the first time your full node is running with the event queue enabled, there are 3 possibilities: +If this is the first time your full node is running with the event queue enabled, there are 2 possibilities: -1. You're running the full node using memory storage, like in the example above; -2. You're running the full node using RocksDB storage (the default option), and - 1. You're performing a sync from scratch, that is, you don't have an existing database, or - 2. You're running from an existing database. +1. You're performing a sync from scratch or you're using a temporary database (like in the example above), that is, you don't have an existing database, or +2. You're running from an existing database. -For cases 1 and 2.1, the full node will start normally, events will be generated in real time while vertices are synced, and they'll be sent to the WebSocket connection accordingly, as explained below. +For case 1, the full node will start normally, events will be generated in real time while vertices are synced and they'll be sent to the WebSocket connection accordingly, as explained below. -For case 2.2, an extra loading step will be performed during full node initialization, generating events for all existing vertices in your database. 
This step is slower than normal full node initialization and can take several minutes. Note that this will only be necessary once — after initialization, the events generated for your database are persisted and will be used in subsequent runs. +For case 2, an extra loading step will be performed during full node initialization, generating events for all existing vertices in your database. This step is slower than normal full node initialization and can take several minutes. Note that this will only be necessary once — after initialization, the events generated for your database are persisted and will be used in subsequent runs. ### Subsequent runs when using RocksDB diff --git a/extras/custom_checks.sh b/extras/custom_checks.sh index ece887832..7fcf2e796 100644 --- a/extras/custom_checks.sh +++ b/extras/custom_checks.sh @@ -58,6 +58,7 @@ function check_do_not_use_builtin_random_in_tests() { hathor/merged_mining/debug_api.py hathor/client.py hathor/cli/tx_generator.py + tests/test_utils/test_leb128.py ) exclude_params=() for item in "${exclude[@]}"; do @@ -81,9 +82,10 @@ function check_deprecated_typing() { } function check_do_not_import_tests_in_hathor() { - if grep -R '\<.*import .*tests.*\>\|\<.*from .*tests.* import\>' "hathor"; then + if grep -R '\<.*import .*tests.*\>\|\<.*from .*tests.* import\>' "hathor" | grep -v '# skip-import-tests-custom-check'; then echo 'do not import test definitions in the hathor module' echo 'move them from tests to hathor instead' + echo 'alternatively, comment `# skip-import-tests-custom-check` to exclude a line.' 
return 1 fi return 0 diff --git a/extras/custom_tests/side_dag/utils.py b/extras/custom_tests/side_dag/utils.py index 7793c30ac..4359b1d36 100644 --- a/extras/custom_tests/side_dag/utils.py +++ b/extras/custom_tests/side_dag/utils.py @@ -29,11 +29,11 @@ python -m hathor run_node_with_side_dag --disable-logs --testnet - --memory-storage + --temp-data --x-localhost-only --procname-prefix {HATHOR_PROCESS_PREFIX} --side-dag-testnet - --side-dag-memory-storage + --side-dag-temp-data --side-dag-x-localhost-only --side-dag-procname-prefix {SIDE_DAG_PROCESS_PREFIX} """ diff --git a/extras/github/docker.py b/extras/github/docker.py index b7ee9bbf4..ddc7bcadd 100644 --- a/extras/github/docker.py +++ b/extras/github/docker.py @@ -18,7 +18,7 @@ def prep_base_version(environ: Dict): ref = GITHUB_REF # Set base_version according to the github ref type - is_release_candidate = False + is_pre_release = False is_release = False is_nightly = False @@ -42,9 +42,9 @@ def prep_base_version(environ: Dict): # Check if this is a release-candidate if pre_release: - if re.match(r'^rc\.[0-9]{1,3}$', pre_release): + if re.match(r'^(rc|alpha|beta)\.[0-9]{1,3}$', pre_release): base_version = base_version + '-' + pre_release - is_release_candidate = True + is_pre_release = True else: raise ValueError(f'Invalid Tag Value: {git_tag}') else: @@ -58,18 +58,18 @@ def prep_base_version(environ: Dict): else: base_version = 'noop' - overwrite_hathor_core_version = is_release or is_release_candidate or is_nightly + overwrite_hathor_core_version = is_release or is_pre_release or is_nightly # We don't know for sure at this point in which cases we should enable Slack notification, # but we know when we should disable it for sure - output['disable-slack-notification'] = not (is_release or is_release_candidate) + output['disable-slack-notification'] = not (is_release or is_pre_release) if GITHUB_REPOSITORY.lower() != 'hathornetwork/hathor-core': output['disable-slack-notification'] = True - return output, 
base_version, is_release_candidate, overwrite_hathor_core_version + return output, base_version, is_pre_release, overwrite_hathor_core_version -def prep_tags(environ: Dict, base_version: str, is_release_candidate: bool): +def prep_tags(environ: Dict, base_version: str, is_pre_release: bool): MATRIX_PYTHON_IMPL = environ.get('MATRIX_PYTHON_IMPL') MATRIX_PYTHON_VERSION = environ.get('MATRIX_PYTHON_VERSION') @@ -111,7 +111,7 @@ def extract_pyver(filename): tags.add(minor + '-' + suffix) if suffix == default_python: tags.add('latest') - elif GITHUB_EVENT_NAME == 'push' and not is_release_candidate: + elif GITHUB_EVENT_NAME == 'push' and not is_pre_release: tags.add('sha-' + GITHUB_SHA[:8]) # Build the image list and set outputs @@ -150,10 +150,10 @@ def overwrite_version(base_version: str): if __name__ == '__main__': - output, base_version, is_release_candidate, overwrite_hathor_core_version = prep_base_version(os.environ) + output, base_version, is_pre_release, overwrite_hathor_core_version = prep_base_version(os.environ) print_output(output) - output = prep_tags(os.environ, base_version, is_release_candidate) + output = prep_tags(os.environ, base_version, is_pre_release) print_output(output) if overwrite_hathor_core_version: diff --git a/extras/github/test_docker.py b/extras/github/test_docker.py index b5db27e67..6a5cadc3a 100644 --- a/extras/github/test_docker.py +++ b/extras/github/test_docker.py @@ -3,8 +3,8 @@ from extras.github.docker import prep_base_version, prep_tags -DEFAULT_PYTHON_VERSION = '3.11' -NON_DEFAULT_PYTHON_VERSION = '3.10' +DEFAULT_PYTHON_VERSION = '3.12' +NON_DEFAULT_PYTHON_VERSION = '3.11' class DockerWorkflowTest(unittest.TestCase): diff --git a/flake.nix b/flake.nix index ba17e7ef4..1aff1bea8 100644 --- a/flake.nix +++ b/flake.nix @@ -17,7 +17,7 @@ in pkgs.mkShell { buildInputs = [ - pkgs.python310 + pkgs.python312 pkgs.poetry pkgs.rocksdb pkgs.snappy @@ -33,7 +33,7 @@ shellHook = '' export CFLAGS="-I${pkgs.rocksdb}/include" export 
LDFLAGS="-L${pkgs.rocksdb}/lib" - poetry env use python3.10 + poetry env use python3.12 ''; }; }); diff --git a/hathor/builder/builder.py b/hathor/builder/builder.py index c53c10f2f..a16b00d7f 100644 --- a/hathor/builder/builder.py +++ b/hathor/builder/builder.py @@ -12,11 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. -from enum import Enum, IntEnum +import tempfile +from enum import IntEnum from typing import Any, Callable, NamedTuple, Optional, TypeAlias from structlog import get_logger -from typing_extensions import assert_never from hathor.checkpoint import Checkpoint from hathor.conf.settings import HathorSettings as HathorSettingsType @@ -24,28 +24,28 @@ from hathor.consensus.poa import PoaBlockProducer, PoaSigner from hathor.daa import DifficultyAdjustmentAlgorithm from hathor.event import EventManager -from hathor.event.storage import EventMemoryStorage, EventRocksDBStorage, EventStorage +from hathor.event.storage import EventRocksDBStorage, EventStorage from hathor.event.websocket import EventWebsocketFactory from hathor.execution_manager import ExecutionManager from hathor.feature_activation.bit_signaling_service import BitSignalingService from hathor.feature_activation.feature import Feature from hathor.feature_activation.feature_service import FeatureService from hathor.feature_activation.storage.feature_activation_storage import FeatureActivationStorage -from hathor.indexes import IndexesManager, MemoryIndexesManager, RocksDBIndexesManager +from hathor.indexes import IndexesManager, RocksDBIndexesManager from hathor.manager import HathorManager from hathor.mining.cpu_mining_service import CpuMiningService +from hathor.nanocontracts import NCRocksDBStorageFactory, NCStorageFactory +from hathor.nanocontracts.catalog import NCBlueprintCatalog +from hathor.nanocontracts.nc_exec_logs import NCLogConfig, NCLogStorage +from hathor.nanocontracts.runner.runner import RunnerFactory +from 
hathor.nanocontracts.sorter.types import NCSorterCallable from hathor.p2p.manager import ConnectionsManager from hathor.p2p.peer import PrivatePeer from hathor.pubsub import PubSubManager from hathor.reactor import ReactorProtocol as Reactor from hathor.storage import RocksDBStorage from hathor.stratum import StratumFactory -from hathor.transaction.storage import ( - TransactionCacheStorage, - TransactionMemoryStorage, - TransactionRocksDBStorage, - TransactionStorage, -) +from hathor.transaction.storage import TransactionCacheStorage, TransactionRocksDBStorage, TransactionStorage from hathor.transaction.vertex_parser import VertexParser from hathor.util import Random, get_environment_info from hathor.verification.verification_service import VerificationService @@ -87,11 +87,6 @@ def add_factories( p2p_manager.enable_sync_version(SyncVersion.V2) -class StorageType(Enum): - MEMORY = 'memory' - ROCKSDB = 'rocksdb' - - class BuildArtifacts(NamedTuple): """Artifacts created by a builder.""" peer: PrivatePeer @@ -107,7 +102,7 @@ class BuildArtifacts(NamedTuple): bit_signaling_service: BitSignalingService indexes: Optional[IndexesManager] wallet: Optional[BaseWallet] - rocksdb_storage: Optional[RocksDBStorage] + rocksdb_storage: RocksDBStorage stratum_factory: Optional[StratumFactory] @@ -123,7 +118,7 @@ class Builder: Example: builder = Builder() - builder.use_memory() + builder.enable_event_queue() artifacts = builder.build() """ def __init__(self) -> None: @@ -138,9 +133,6 @@ def __init__(self) -> None: self._peer: Optional[PrivatePeer] = None self._cmdline: str = '' - self._storage_type: StorageType = StorageType.MEMORY - self._force_memory_index: bool = False - self._event_manager: Optional[EventManager] = None self._enable_event_queue: Optional[bool] = None @@ -156,7 +148,7 @@ def __init__(self) -> None: self._vertex_verifiers_builder: _VertexVerifiersBuilder | None = None self._verification_service: Optional[VerificationService] = None - self._rocksdb_path: 
Optional[str] = None + self._rocksdb_path: str | tempfile.TemporaryDirectory | None = None self._rocksdb_storage: Optional[RocksDBStorage] = None self._rocksdb_cache_capacity: Optional[int] = None @@ -177,13 +169,12 @@ def __init__(self) -> None: self._enable_address_index: bool = False self._enable_tokens_index: bool = False self._enable_utxo_index: bool = False + self._enable_nc_indexes: bool = False self._sync_v2_support: SyncSupportLevel = SyncSupportLevel.ENABLED self._enable_stratum_server: Optional[bool] = None - self._full_verification: Optional[bool] = None - self._soft_voided_tx_ids: Optional[set[bytes]] = None self._execution_manager: ExecutionManager | None = None @@ -197,6 +188,13 @@ def __init__(self) -> None: self._enable_ipv6: bool = False self._disable_ipv4: bool = False + self._nc_anti_mev: bool = True + + self._nc_storage_factory: NCStorageFactory | None = None + self._nc_log_storage: NCLogStorage | None = None + self._runner_factory: RunnerFactory | None = None + self._nc_log_config: NCLogConfig = NCLogConfig.NONE + def build(self) -> BuildArtifacts: if self.artifacts is not None: raise ValueError('cannot call build twice') @@ -219,6 +217,7 @@ def build(self) -> BuildArtifacts: event_manager = self._get_or_create_event_manager() indexes = self._get_or_create_indexes_manager() tx_storage = self._get_or_create_tx_storage() + rocksdb_storage = self._get_or_create_rocksdb_storage() feature_service = self._get_or_create_feature_service() bit_signaling_service = self._get_or_create_bit_signaling_service() verification_service = self._get_or_create_verification_service() @@ -227,6 +226,10 @@ def build(self) -> BuildArtifacts: vertex_handler = self._get_or_create_vertex_handler() vertex_parser = self._get_or_create_vertex_parser() poa_block_producer = self._get_or_create_poa_block_producer() + runner_factory = self._get_or_create_runner_factory() + + if settings.ENABLE_NANO_CONTRACTS: + tx_storage.nc_catalog = self._get_nc_catalog() if 
self._enable_address_index: indexes.enable_address_index(pubsub) @@ -237,10 +240,10 @@ def build(self) -> BuildArtifacts: if self._enable_utxo_index: indexes.enable_utxo_index() - kwargs: dict[str, Any] = {} + if self._enable_nc_indexes: + indexes.enable_nc_indexes() - if self._full_verification is not None: - kwargs['full_verification'] = self._full_verification + kwargs: dict[str, Any] = {} if self._enable_event_queue is not None: kwargs['enable_event_queue'] = self._enable_event_queue @@ -267,6 +270,7 @@ def build(self) -> BuildArtifacts: vertex_handler=vertex_handler, vertex_parser=vertex_parser, poa_block_producer=poa_block_producer, + runner_factory=runner_factory, **kwargs ) @@ -290,10 +294,10 @@ def build(self) -> BuildArtifacts: tx_storage=tx_storage, indexes=indexes, wallet=wallet, - rocksdb_storage=self._rocksdb_storage, + rocksdb_storage=rocksdb_storage, stratum_factory=stratum_factory, feature_service=feature_service, - bit_signaling_service=bit_signaling_service + bit_signaling_service=bit_signaling_service, ) return self.artifacts @@ -368,14 +372,67 @@ def _get_or_create_execution_manager(self) -> ExecutionManager: return self._execution_manager + def _get_or_create_nc_storage_factory(self) -> NCStorageFactory: + if self._nc_storage_factory is not None: + return self._nc_storage_factory + + rocksdb_storage = self._get_or_create_rocksdb_storage() + self._nc_storage_factory = NCRocksDBStorageFactory(rocksdb_storage) + return self._nc_storage_factory + + def _get_nc_calls_sorter(self) -> NCSorterCallable: + if self._nc_anti_mev: + from hathor.nanocontracts.sorter.random_sorter import random_nc_calls_sorter + return random_nc_calls_sorter + else: + from hathor.nanocontracts.sorter.timestamp_sorter import timestamp_nc_calls_sorter + return timestamp_nc_calls_sorter + + def _get_or_create_nc_log_storage(self) -> NCLogStorage: + if self._nc_log_storage is not None: + return self._nc_log_storage + + rocksdb_storage = self._get_or_create_rocksdb_storage() + 
self._nc_log_storage = NCLogStorage( + settings=self._get_or_create_settings(), + path=rocksdb_storage.path, + config=self._nc_log_config, + ) + return self._nc_log_storage + def _get_or_create_consensus(self) -> ConsensusAlgorithm: if self._consensus is None: soft_voided_tx_ids = self._get_soft_voided_tx_ids() pubsub = self._get_or_create_pubsub() - self._consensus = ConsensusAlgorithm(soft_voided_tx_ids, pubsub) + nc_storage_factory = self._get_or_create_nc_storage_factory() + nc_calls_sorter = self._get_nc_calls_sorter() + self._consensus = ConsensusAlgorithm( + nc_storage_factory=nc_storage_factory, + soft_voided_tx_ids=soft_voided_tx_ids, + pubsub=pubsub, + settings=self._get_or_create_settings(), + runner_factory=self._get_or_create_runner_factory(), + nc_log_storage=self._get_or_create_nc_log_storage(), + nc_calls_sorter=nc_calls_sorter, + ) return self._consensus + def _get_nc_catalog(self) -> NCBlueprintCatalog: + from hathor.nanocontracts.catalog import generate_catalog_from_settings + settings = self._get_or_create_settings() + return generate_catalog_from_settings(settings) + + def _get_or_create_runner_factory(self) -> RunnerFactory: + if self._runner_factory is None: + self._runner_factory = RunnerFactory( + reactor=self._get_reactor(), + settings=self._get_or_create_settings(), + tx_storage=self._get_or_create_tx_storage(), + nc_storage_factory=self._get_or_create_nc_storage_factory(), + ) + return self._runner_factory + def _get_or_create_pubsub(self) -> PubSubManager: if self._pubsub is None: self._pubsub = PubSubManager(self._get_reactor()) @@ -388,20 +445,11 @@ def _create_stratum_server(self, manager: HathorManager) -> StratumFactory: return stratum_factory def _get_or_create_rocksdb_storage(self) -> RocksDBStorage: - assert self._rocksdb_path is not None - - if self._rocksdb_storage is not None: - return self._rocksdb_storage - - kwargs = {} - if self._rocksdb_cache_capacity is not None: - kwargs = 
dict(cache_capacity=self._rocksdb_cache_capacity) - - self._rocksdb_storage = RocksDBStorage( - path=self._rocksdb_path, - **kwargs - ) - + if self._rocksdb_storage is None: + self._rocksdb_storage = RocksDBStorage( + path=self._rocksdb_path, + cache_capacity=self._rocksdb_cache_capacity, + ) if self._rocksdb_path else RocksDBStorage.create_temp(self._rocksdb_cache_capacity) return self._rocksdb_storage def _get_or_create_p2p_manager(self) -> ConnectionsManager: @@ -433,19 +481,12 @@ def _get_or_create_p2p_manager(self) -> ConnectionsManager: return self._p2p_manager def _get_or_create_indexes_manager(self) -> IndexesManager: - if self._indexes_manager is not None: - return self._indexes_manager - - if self._force_memory_index or self._storage_type == StorageType.MEMORY: - self._indexes_manager = MemoryIndexesManager(settings=self._get_or_create_settings()) - - elif self._storage_type == StorageType.ROCKSDB: + if self._indexes_manager is None: rocksdb_storage = self._get_or_create_rocksdb_storage() - self._indexes_manager = RocksDBIndexesManager(rocksdb_storage) - - else: - raise NotImplementedError - + self._indexes_manager = RocksDBIndexesManager( + rocksdb_storage, + settings=self._get_or_create_settings(), + ) return self._indexes_manager def _get_or_create_tx_storage(self) -> TransactionStorage: @@ -461,21 +502,16 @@ def _get_or_create_tx_storage(self) -> TransactionStorage: if self._tx_storage_cache: store_indexes = None - if self._storage_type == StorageType.MEMORY: - self._tx_storage = TransactionMemoryStorage(indexes=store_indexes, settings=settings) - - elif self._storage_type == StorageType.ROCKSDB: - rocksdb_storage = self._get_or_create_rocksdb_storage() - vertex_parser = self._get_or_create_vertex_parser() - self._tx_storage = TransactionRocksDBStorage( - rocksdb_storage, - indexes=store_indexes, - settings=settings, - vertex_parser=vertex_parser, - ) - - else: - raise NotImplementedError + rocksdb_storage = self._get_or_create_rocksdb_storage() + 
nc_storage_factory = self._get_or_create_nc_storage_factory() + vertex_parser = self._get_or_create_vertex_parser() + self._tx_storage = TransactionRocksDBStorage( + rocksdb_storage, + indexes=store_indexes, + settings=settings, + vertex_parser=vertex_parser, + nc_storage_factory=nc_storage_factory, + ) if self._tx_storage_cache: reactor = self._get_reactor() @@ -483,22 +519,20 @@ def _get_or_create_tx_storage(self) -> TransactionStorage: if self._tx_storage_cache_capacity is not None: kwargs['capacity'] = self._tx_storage_cache_capacity self._tx_storage = TransactionCacheStorage( - self._tx_storage, reactor, indexes=indexes, settings=settings, **kwargs + self._tx_storage, + reactor, + indexes=indexes, + settings=settings, + nc_storage_factory=nc_storage_factory, + **kwargs ) return self._tx_storage def _get_or_create_event_storage(self) -> EventStorage: - if self._event_storage is not None: - pass - elif self._storage_type == StorageType.MEMORY: - self._event_storage = EventMemoryStorage() - elif self._storage_type == StorageType.ROCKSDB: + if self._event_storage is None: rocksdb_storage = self._get_or_create_rocksdb_storage() self._event_storage = EventRocksDBStorage(rocksdb_storage) - else: - raise NotImplementedError - return self._event_storage def _get_or_create_event_manager(self) -> EventManager: @@ -562,14 +596,11 @@ def _get_or_create_verification_service(self) -> VerificationService: return self._verification_service - def _get_or_create_feature_storage(self) -> FeatureActivationStorage | None: - match self._storage_type: - case StorageType.MEMORY: return None - case StorageType.ROCKSDB: return FeatureActivationStorage( - settings=self._get_or_create_settings(), - rocksdb_storage=self._get_or_create_rocksdb_storage() - ) - case _: assert_never(self._storage_type) + def _get_or_create_feature_storage(self) -> FeatureActivationStorage: + return FeatureActivationStorage( + settings=self._get_or_create_settings(), + 
rocksdb_storage=self._get_or_create_rocksdb_storage() + ) def _get_or_create_vertex_verifiers(self) -> VertexVerifiers: if self._vertex_verifiers is None: @@ -638,33 +669,28 @@ def _get_or_create_poa_block_producer(self) -> PoaBlockProducer | None: return self._poa_block_producer - def use_memory(self) -> 'Builder': + def set_rocksdb_path(self, path: str | tempfile.TemporaryDirectory) -> 'Builder': + if self._tx_storage: + raise ValueError('cannot set rocksdb path after tx storage is set') self.check_if_can_modify() - self._storage_type = StorageType.MEMORY + self._rocksdb_path = path return self - def use_rocksdb( - self, - path: str, - cache_capacity: Optional[int] = None - ) -> 'Builder': + def set_rocksdb_cache_capacity(self, cache_capacity: int) -> 'Builder': + if self._tx_storage: + raise ValueError('cannot set rocksdb cache capacity after tx storage is set') self.check_if_can_modify() - self._storage_type = StorageType.ROCKSDB - self._rocksdb_path = path self._rocksdb_cache_capacity = cache_capacity return self def use_tx_storage_cache(self, capacity: Optional[int] = None) -> 'Builder': + if self._tx_storage: + raise ValueError('cannot set tx storage cache capacity after tx storage is set') self.check_if_can_modify() self._tx_storage_cache = True self._tx_storage_cache_capacity = capacity return self - def force_memory_index(self) -> 'Builder': - self.check_if_can_modify() - self._force_memory_index = True - return self - def _get_or_create_wallet(self) -> Optional[BaseWallet]: if self._wallet is not None: return self._wallet @@ -693,21 +719,34 @@ def enable_stratum_server(self) -> 'Builder': return self def enable_address_index(self) -> 'Builder': + if self._tx_storage or self._indexes_manager: + raise ValueError('cannot enable index after tx storage or indexes manager is set') self.check_if_can_modify() self._enable_address_index = True return self def enable_tokens_index(self) -> 'Builder': + if self._tx_storage or self._indexes_manager: + raise 
ValueError('cannot enable index after tx storage or indexes manager is set') self.check_if_can_modify() self._enable_tokens_index = True return self def enable_utxo_index(self) -> 'Builder': + if self._tx_storage or self._indexes_manager: + raise ValueError('cannot enable index after tx storage or indexes manager is set') self.check_if_can_modify() self._enable_utxo_index = True return self + def enable_nc_indexes(self) -> 'Builder': + self.check_if_can_modify() + self._enable_nc_indexes = True + return self + def enable_wallet_index(self) -> 'Builder': + if self._tx_storage or self._indexes_manager: + raise ValueError('cannot enable index after tx storage or indexes manager is set') self.check_if_can_modify() self.enable_address_index() self.enable_tokens_index() @@ -721,6 +760,9 @@ def enable_event_queue(self) -> 'Builder': def set_tx_storage(self, tx_storage: TransactionStorage) -> 'Builder': self.check_if_can_modify() self._tx_storage = tx_storage + internal = tx_storage.store if isinstance(tx_storage, TransactionCacheStorage) else tx_storage + assert isinstance(internal, TransactionRocksDBStorage) + self._rocksdb_storage = internal._rocksdb_storage return self def set_event_storage(self, event_storage: EventStorage) -> 'Builder': @@ -778,29 +820,24 @@ def disable_sync_v2(self) -> 'Builder': self._sync_v2_support = SyncSupportLevel.DISABLED return self - def set_full_verification(self, full_verification: bool) -> 'Builder': - self.check_if_can_modify() - self._full_verification = full_verification - return self - - def enable_full_verification(self) -> 'Builder': + def enable_ipv6(self) -> 'Builder': self.check_if_can_modify() - self._full_verification = True + self._enable_ipv6 = True return self - def disable_full_verification(self) -> 'Builder': + def disable_ipv4(self) -> 'Builder': self.check_if_can_modify() - self._full_verification = False + self._disable_ipv4 = True return self - def enable_ipv6(self) -> 'Builder': + def enable_nc_anti_mev(self) -> 
'Builder': self.check_if_can_modify() - self._enable_ipv6 = True + self._nc_anti_mev = True return self - def disable_ipv4(self) -> 'Builder': + def disable_nc_anti_mev(self) -> 'Builder': self.check_if_can_modify() - self._disable_ipv4 = True + self._nc_anti_mev = False return self def set_soft_voided_tx_ids(self, soft_voided_tx_ids: set[bytes]) -> 'Builder': @@ -828,3 +865,8 @@ def set_poa_signer(self, signer: PoaSigner) -> 'Builder': self.check_if_can_modify() self._poa_signer = signer return self + + def set_nc_log_config(self, config: NCLogConfig) -> 'Builder': + self.check_if_can_modify() + self._nc_log_config = config + return self diff --git a/hathor/builder/cli_builder.py b/hathor/builder/cli_builder.py index 9d4dc7fda..ed24f1846 100644 --- a/hathor/builder/cli_builder.py +++ b/hathor/builder/cli_builder.py @@ -30,9 +30,11 @@ from hathor.feature_activation.bit_signaling_service import BitSignalingService from hathor.feature_activation.feature_service import FeatureService from hathor.feature_activation.storage.feature_activation_storage import FeatureActivationStorage -from hathor.indexes import IndexesManager, MemoryIndexesManager, RocksDBIndexesManager +from hathor.indexes import IndexesManager, RocksDBIndexesManager from hathor.manager import HathorManager from hathor.mining.cpu_mining_service import CpuMiningService +from hathor.nanocontracts.nc_exec_logs import NCLogStorage +from hathor.nanocontracts.runner.runner import RunnerFactory from hathor.p2p.manager import ConnectionsManager from hathor.p2p.peer import PrivatePeer from hathor.p2p.peer_endpoint import PeerEndpoint @@ -66,22 +68,23 @@ def check_or_raise(self, condition: bool, message: str) -> None: if not condition: raise BuilderError(message) + def check_or_warn(self, condition: bool, message: str) -> None: + """Will log a warning `message` if `condition` is False.""" + if not condition: + self.log.warn(message) + def create_manager(self, reactor: Reactor) -> HathorManager: import hathor from 
hathor.builder import SyncSupportLevel from hathor.conf.get_settings import get_global_settings, get_settings_source from hathor.daa import TestMode - from hathor.event.storage import EventMemoryStorage, EventRocksDBStorage, EventStorage + from hathor.event.storage import EventRocksDBStorage, EventStorage from hathor.event.websocket.factory import EventWebsocketFactory + from hathor.nanocontracts import NCRocksDBStorageFactory, NCStorageFactory from hathor.p2p.netfilter.utils import add_peer_id_blacklist from hathor.p2p.peer_discovery import BootstrapPeerDiscovery, DNSPeerDiscovery from hathor.storage import RocksDBStorage - from hathor.transaction.storage import ( - TransactionCacheStorage, - TransactionMemoryStorage, - TransactionRocksDBStorage, - TransactionStorage, - ) + from hathor.transaction.storage import TransactionCacheStorage, TransactionRocksDBStorage, TransactionStorage from hathor.util import get_environment_info settings = get_global_settings() @@ -112,51 +115,49 @@ def create_manager(self, reactor: Reactor) -> HathorManager: ) # XXX Remove this protection after Nano Contracts are launched. - if settings.NETWORK_NAME not in {'nano-testnet-alpha', 'unittests'}: + if settings.NETWORK_NAME not in ('unittests', 'nano-testnet-bravo', 'testnet-hotel'): # Add protection to prevent enabling Nano Contracts due to misconfigurations. 
- self.check_or_raise(not settings.ENABLE_NANO_CONTRACTS, - 'configuration error: NanoContracts can only be enabled on localnets for now') + self.check_or_raise( + not settings.ENABLE_NANO_CONTRACTS, + 'configuration error: NanoContracts can only be enabled on specific networks for now', + ) vertex_parser = VertexParser(settings=settings) tx_storage: TransactionStorage event_storage: EventStorage indexes: IndexesManager - feature_storage: FeatureActivationStorage | None = None - self.rocksdb_storage: Optional[RocksDBStorage] = None self.event_ws_factory: Optional[EventWebsocketFactory] = None - if self._args.memory_storage: - self.check_or_raise(not self._args.data, '--data should not be used with --memory-storage') - # if using MemoryStorage, no need to have cache - indexes = MemoryIndexesManager() - tx_storage = TransactionMemoryStorage(indexes, settings=settings) - event_storage = EventMemoryStorage() - self.check_or_raise(not self._args.x_rocksdb_indexes, 'RocksDB indexes require RocksDB data') - self.log.info('with storage', storage_class=type(tx_storage).__name__) - else: - self.check_or_raise(bool(self._args.data), '--data is expected') - assert self._args.data is not None - if self._args.rocksdb_storage: - self.log.warn('--rocksdb-storage is now implied, no need to specify it') - cache_capacity = self._args.rocksdb_cache - self.rocksdb_storage = RocksDBStorage(path=self._args.data, cache_capacity=cache_capacity) - - # Initialize indexes manager. - if self._args.memory_indexes: - indexes = MemoryIndexesManager() - else: - indexes = RocksDBIndexesManager(self.rocksdb_storage) - - kwargs: dict[str, Any] = {} - if self._args.disable_cache: - # We should only pass indexes if cache is disabled. Otherwise, - # only TransactionCacheStorage should have indexes. 
- kwargs['indexes'] = indexes - tx_storage = TransactionRocksDBStorage( - self.rocksdb_storage, settings=settings, vertex_parser=vertex_parser, **kwargs - ) - event_storage = EventRocksDBStorage(self.rocksdb_storage) - feature_storage = FeatureActivationStorage(settings=settings, rocksdb_storage=self.rocksdb_storage) + memory_msg = 'is deprecated. use --temp-data instead' + self.check_or_raise(not self._args.memory_storage, f'--memory-storage {memory_msg}') + self.check_or_raise(not self._args.memory_indexes, f'--memory-indexes {memory_msg}') + + self.check_or_raise(bool(self._args.data) or self._args.temp_data, 'either --data or --temp-data is expected') + cache_capacity = self._args.rocksdb_cache + self.rocksdb_storage = ( + RocksDBStorage(path=self._args.data, cache_capacity=cache_capacity) + if self._args.data else RocksDBStorage.create_temp(cache_capacity) + ) + + self.nc_storage_factory: NCStorageFactory = NCRocksDBStorageFactory(self.rocksdb_storage) + + # Initialize indexes manager. + indexes = RocksDBIndexesManager(self.rocksdb_storage, settings=settings) + + kwargs: dict[str, Any] = {} + if self._args.disable_cache: + # We should only pass indexes if cache is disabled. Otherwise, + # only TransactionCacheStorage should have indexes. 
+ kwargs['indexes'] = indexes + tx_storage = TransactionRocksDBStorage( + self.rocksdb_storage, + settings=settings, + vertex_parser=vertex_parser, + nc_storage_factory=self.nc_storage_factory, + **kwargs + ) + event_storage = EventRocksDBStorage(self.rocksdb_storage) + feature_storage = FeatureActivationStorage(settings=settings, rocksdb_storage=self.rocksdb_storage) self.log.info('with storage', storage_class=type(tx_storage).__name__, path=self._args.data) @@ -167,14 +168,14 @@ def create_manager(self, reactor: Reactor) -> HathorManager: self.check_or_raise(self._args.cache_size is None, 'cannot use --disable-cache with --cache-size') self.check_or_raise(self._args.cache_interval is None, 'cannot use --disable-cache with --cache-interval') - if self._args.memory_storage: - if self._args.cache_size: - self.log.warn('using --cache-size with --memory-storage has no effect') - if self._args.cache_interval: - self.log.warn('using --cache-interval with --memory-storage has no effect') - - if not self._args.disable_cache and not self._args.memory_storage: - tx_storage = TransactionCacheStorage(tx_storage, reactor, indexes=indexes, settings=settings) + if not self._args.disable_cache: + tx_storage = TransactionCacheStorage( + tx_storage, + reactor, + indexes=indexes, + settings=settings, + nc_storage_factory=self.nc_storage_factory, + ) tx_storage.capacity = self._args.cache_size if self._args.cache_size is not None else DEFAULT_CACHE_SIZE if self._args.cache_interval: tx_storage.interval = self._args.cache_interval @@ -183,6 +184,10 @@ def create_manager(self, reactor: Reactor) -> HathorManager: self.tx_storage = tx_storage self.log.info('with indexes', indexes_class=type(tx_storage.indexes).__name__) + if settings.ENABLE_NANO_CONTRACTS: + from hathor.nanocontracts.catalog import generate_catalog_from_settings + self.tx_storage.nc_catalog = generate_catalog_from_settings(settings) + self.wallet = None if self._args.wallet: self.wallet = self.create_wallet() @@ -190,20 
+195,13 @@ def create_manager(self, reactor: Reactor) -> HathorManager: hostname = self.get_hostname() - if self._args.sync_bridge: - raise BuilderError('--sync-bridge was removed') - elif self._args.sync_v1_only: - raise BuilderError('--sync-v1-only was removed') - elif self._args.sync_v2_only: - self.log.warn('--sync-v2-only is the default, this parameter has no effect') - elif self._args.x_remove_sync_v1: - self.log.warn('--x-remove-sync-v1 is deprecated and has no effect') - elif self._args.x_sync_bridge: - raise BuilderError('--x-sync-bridge was removed') - elif self._args.x_sync_v1_only: - raise BuilderError('--x-sync-v1-only was removed') - elif self._args.x_sync_v2_only: - self.log.warn('--x-sync-v2-only is deprecated and will be removed') + self.check_or_raise(not self._args.sync_bridge, '--sync-bridge was removed') + self.check_or_raise(not self._args.sync_v1_only, '--sync-v1-only was removed') + self.check_or_raise(not self._args.x_sync_bridge, '--x-sync-bridge was removed') + self.check_or_raise(not self._args.x_sync_v1_only, '--x-sync-v1-only was removed') + self.check_or_warn(not self._args.sync_v2_only, '--sync-v2-only is the default, this parameter has no effect') + self.check_or_warn(not self._args.x_remove_sync_v1, '--x-remove-sync-v1 is deprecated and has no effect') + self.check_or_warn(not self._args.x_sync_v2_only, '--x-sync-v2-only is deprecated and will be removed') pubsub = PubSubManager(reactor) @@ -236,12 +234,36 @@ def create_manager(self, reactor: Reactor) -> HathorManager: self.log.debug('enable utxo index') tx_storage.indexes.enable_utxo_index() - self.check_or_raise(not self._args.x_full_verification, '--x-full-verification is deprecated') + if self._args.nc_indexes and tx_storage.indexes is not None: + self.log.debug('enable nano indexes') + tx_storage.indexes.enable_nc_indexes() + + from hathor.nanocontracts.sorter.random_sorter import random_nc_calls_sorter + nc_calls_sorter = random_nc_calls_sorter + + assert 
self.nc_storage_factory is not None + runner_factory = RunnerFactory( + reactor=reactor, + settings=settings, + tx_storage=tx_storage, + nc_storage_factory=self.nc_storage_factory, + ) + + nc_log_storage = NCLogStorage( + settings=settings, + path=self.rocksdb_storage.path, + config=self._args.nc_exec_logs, + ) soft_voided_tx_ids = set(settings.SOFT_VOIDED_TX_IDS) consensus_algorithm = ConsensusAlgorithm( + self.nc_storage_factory, soft_voided_tx_ids, pubsub=pubsub, + settings=settings, + runner_factory=runner_factory, + nc_log_storage=nc_log_storage, + nc_calls_sorter=nc_calls_sorter, ) if self._args.x_enable_event_queue or self._args.enable_event_queue: @@ -333,7 +355,6 @@ def create_manager(self, reactor: Reactor) -> HathorManager: wallet=self.wallet, checkpoints=settings.CHECKPOINTS, environment_info=get_environment_info(args=str(self._args), peer_id=str(peer.id)), - full_verification=False, enable_event_queue=self._args.x_enable_event_queue or self._args.enable_event_queue, bit_signaling_service=bit_signaling_service, verification_service=verification_service, @@ -342,6 +363,7 @@ def create_manager(self, reactor: Reactor) -> HathorManager: vertex_handler=vertex_handler, vertex_parser=vertex_parser, poa_block_producer=poa_block_producer, + runner_factory=runner_factory, ) if self._args.x_ipython_kernel: @@ -381,14 +403,6 @@ def create_manager(self, reactor: Reactor) -> HathorManager: entrypoints = [PeerEndpoint.parse(desc) for desc in self._args.bootstrap] p2p_manager.add_peer_discovery(BootstrapPeerDiscovery(entrypoints)) - if self._args.x_rocksdb_indexes: - self.log.warn('--x-rocksdb-indexes is now the default, no need to specify it') - if self._args.memory_indexes: - raise BuilderError('You cannot use --memory-indexes and --x-rocksdb-indexes.') - - if self._args.memory_indexes and self._args.memory_storage: - self.log.warn('--memory-indexes is implied for memory storage or JSON storage') - for description in self._args.listen: 
p2p_manager.add_listen_address_description(description) diff --git a/hathor/builder/resources_builder.py b/hathor/builder/resources_builder.py index bfc47e7a7..0dd2be844 100644 --- a/hathor/builder/resources_builder.py +++ b/hathor/builder/resources_builder.py @@ -24,6 +24,10 @@ from hathor.event.resources.event import EventResource from hathor.exception import BuilderError from hathor.feature_activation.feature_service import FeatureService +from hathor.nanocontracts.resources.builtin import BlueprintBuiltinResource +from hathor.nanocontracts.resources.nc_creation import NCCreationResource +from hathor.nanocontracts.resources.nc_exec_logs import NCExecLogsResource +from hathor.nanocontracts.resources.on_chain import BlueprintOnChainResource from hathor.prometheus import PrometheusMetricsExporter if TYPE_CHECKING: @@ -250,6 +254,26 @@ def create_resources(self) -> server.Site: (b'utxo_search', UtxoSearchResource(self.manager), root), ]) + if settings.ENABLE_NANO_CONTRACTS: + from hathor.nanocontracts.resources import ( + BlueprintInfoResource, + BlueprintSourceCodeResource, + NanoContractHistoryResource, + NanoContractStateResource, + ) + nc_resource = Resource() + root.putChild(b'nano_contract', nc_resource) + blueprint_resource = Resource() + nc_resource.putChild(b'blueprint', blueprint_resource) + blueprint_resource.putChild(b'info', BlueprintInfoResource(self.manager)) + blueprint_resource.putChild(b'builtin', BlueprintBuiltinResource(self.manager)) + blueprint_resource.putChild(b'on_chain', BlueprintOnChainResource(self.manager)) + blueprint_resource.putChild(b'source', BlueprintSourceCodeResource(self.manager)) + nc_resource.putChild(b'history', NanoContractHistoryResource(self.manager)) + nc_resource.putChild(b'state', NanoContractStateResource(self.manager)) + nc_resource.putChild(b'creation', NCCreationResource(self.manager)) + nc_resource.putChild(b'logs', NCExecLogsResource(self.manager)) + if self._args.enable_debug_api: debug_resource = Resource() 
root.putChild(b'_debug', debug_resource) @@ -323,6 +347,7 @@ def create_resources(self) -> server.Site: # Set websocket factory in metrics. It'll be started when the manager is started. self.manager.websocket_factory = ws_factory + self.manager.metrics.websocket_factory = ws_factory self._built_status = True return status_server diff --git a/hathor/cli/check_blueprint.py b/hathor/cli/check_blueprint.py new file mode 100644 index 000000000..819d2b6a0 --- /dev/null +++ b/hathor/cli/check_blueprint.py @@ -0,0 +1,47 @@ +# Copyright 2024 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +import sys +from argparse import FileType +from io import TextIOWrapper + + +def main() -> None: + from hathor.cli.util import create_parser + from hathor.conf import NANO_TESTNET_SETTINGS_FILEPATH + from hathor.conf.get_settings import get_global_settings + from hathor.nanocontracts import OnChainBlueprint + from hathor.nanocontracts.on_chain_blueprint import Code + from hathor.verification.on_chain_blueprint_verifier import OnChainBlueprintVerifier + + os.environ['HATHOR_CONFIG_YAML'] = NANO_TESTNET_SETTINGS_FILEPATH + + parser = create_parser() + parser.add_argument( + '--file', + type=FileType('r', encoding='UTF-8'), + help='The blueprint file', + required=True, + ) + args = parser.parse_args(sys.argv[1:]) + assert isinstance(args.file, TextIOWrapper) + + settings = get_global_settings() + code = Code.from_python_code(args.file.read(), settings) + verifier = OnChainBlueprintVerifier(settings=settings) + ocb = OnChainBlueprint(hash=b'', code=code) + + verifier.verify_code(ocb) + print('Blueprint is valid!') diff --git a/hathor/cli/db_import.py b/hathor/cli/db_import.py index 8063b31d2..2edde2f39 100644 --- a/hathor/cli/db_import.py +++ b/hathor/cli/db_import.py @@ -90,7 +90,7 @@ def _import_txs(self) -> Iterator['BaseTransaction']: tx = parser.deserialize(tx_bytes) assert tx is not None tx.storage = self.tx_storage - self.manager.on_new_tx(tx, quiet=True, fails_silently=False) + self.manager.on_new_tx(tx, quiet=True) yield tx diff --git a/hathor/cli/events_simulator/events_simulator.py b/hathor/cli/events_simulator/events_simulator.py index 23fe64418..f7fccff98 100644 --- a/hathor/cli/events_simulator/events_simulator.py +++ b/hathor/cli/events_simulator/events_simulator.py @@ -57,12 +57,13 @@ def execute(args: Namespace, reactor: 'ReactorProtocol') -> None: possible_scenarios = [scenario.name for scenario in Scenario] raise ValueError(f'Invalid scenario "{args.scenario}". 
Choose one of {possible_scenarios}') from e + settings = get_global_settings()._replace(REWARD_SPEND_MIN_BLOCKS=scenario.get_reward_spend_min_blocks()) log = logger.new() simulator = Simulator(args.seed) simulator.start() builder = simulator.get_default_builder() \ - .disable_full_verification() \ - .enable_event_queue() + .enable_event_queue() \ + .set_settings(settings) manager = simulator.create_peer(builder) event_ws_factory = manager._event_manager._event_ws_factory @@ -71,7 +72,7 @@ def execute(args: Namespace, reactor: 'ReactorProtocol') -> None: forwarding_ws_factory = EventForwardingWebsocketFactory( simulator=simulator, peer_id='simulator_peer_id', - settings=get_global_settings(), + settings=settings, reactor=reactor, event_storage=event_ws_factory._event_storage ) diff --git a/hathor/cli/events_simulator/scenario.py b/hathor/cli/events_simulator/scenario.py index 7ee5b7917..93d560dd7 100644 --- a/hathor/cli/events_simulator/scenario.py +++ b/hathor/cli/events_simulator/scenario.py @@ -13,9 +13,11 @@ # limitations under the License. 
from enum import Enum -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Optional if TYPE_CHECKING: + from hathor.dag_builder import DAGBuilder + from hathor.dag_builder.artifacts import DAGArtifacts from hathor.manager import HathorManager from hathor.simulator import Simulator @@ -29,8 +31,10 @@ class Scenario(Enum): INVALID_MEMPOOL_TRANSACTION = 'INVALID_MEMPOOL_TRANSACTION' EMPTY_SCRIPT = 'EMPTY_SCRIPT' CUSTOM_SCRIPT = 'CUSTOM_SCRIPT' + NC_EVENTS = 'NC_EVENTS' + NC_EVENTS_REORG = 'NC_EVENTS_REORG' - def simulate(self, simulator: 'Simulator', manager: 'HathorManager') -> None: + def simulate(self, simulator: 'Simulator', manager: 'HathorManager') -> Optional['DAGArtifacts']: simulate_fns = { Scenario.ONLY_LOAD: simulate_only_load, Scenario.SINGLE_CHAIN_ONE_BLOCK: simulate_single_chain_one_block, @@ -40,24 +44,35 @@ def simulate(self, simulator: 'Simulator', manager: 'HathorManager') -> None: Scenario.INVALID_MEMPOOL_TRANSACTION: simulate_invalid_mempool_transaction, Scenario.EMPTY_SCRIPT: simulate_empty_script, Scenario.CUSTOM_SCRIPT: simulate_custom_script, + Scenario.NC_EVENTS: simulate_nc_events, + Scenario.NC_EVENTS_REORG: simulate_nc_events_reorg, } simulate_fn = simulate_fns[self] - simulate_fn(simulator, manager) + return simulate_fn(simulator, manager) + def get_reward_spend_min_blocks(self) -> int: + """Get the REWARD_SPEND_MIN_BLOCKS settings required for this scenario.""" + return 1 if self in (Scenario.NC_EVENTS, Scenario.NC_EVENTS_REORG) else 10 -def simulate_only_load(simulator: 'Simulator', _manager: 'HathorManager') -> None: + +def simulate_only_load(simulator: 'Simulator', _manager: 'HathorManager') -> Optional['DAGArtifacts']: simulator.run(60) + return None -def simulate_single_chain_one_block(simulator: 'Simulator', manager: 'HathorManager') -> None: +def simulate_single_chain_one_block(simulator: 'Simulator', manager: 'HathorManager') -> Optional['DAGArtifacts']: from hathor.simulator.utils import add_new_blocks 
add_new_blocks(manager, 1) simulator.run(60) + return None -def simulate_single_chain_blocks_and_transactions(simulator: 'Simulator', manager: 'HathorManager') -> None: +def simulate_single_chain_blocks_and_transactions( + simulator: 'Simulator', + manager: 'HathorManager', +) -> Optional['DAGArtifacts']: from hathor.conf.get_settings import get_global_settings from hathor.simulator.utils import add_new_blocks, gen_new_tx @@ -71,20 +86,22 @@ def simulate_single_chain_blocks_and_transactions(simulator: 'Simulator', manage tx = gen_new_tx(manager, address, 1000) tx.weight = manager.daa.minimum_tx_weight(tx) tx.update_hash() - assert manager.propagate_tx(tx, fails_silently=False) + assert manager.propagate_tx(tx) simulator.run(60) tx = gen_new_tx(manager, address, 2000) tx.weight = manager.daa.minimum_tx_weight(tx) tx.update_hash() - assert manager.propagate_tx(tx, fails_silently=False) + assert manager.propagate_tx(tx) simulator.run(60) add_new_blocks(manager, 1) simulator.run(60) + return None + -def simulate_reorg(simulator: 'Simulator', manager: 'HathorManager') -> None: +def simulate_reorg(simulator: 'Simulator', manager: 'HathorManager') -> Optional['DAGArtifacts']: from hathor.simulator import FakeConnection from hathor.simulator.utils import add_new_blocks @@ -101,8 +118,10 @@ def simulate_reorg(simulator: 'Simulator', manager: 'HathorManager') -> None: simulator.add_connection(connection) simulator.run(60) + return None + -def simulate_unvoided_transaction(simulator: 'Simulator', manager: 'HathorManager') -> None: +def simulate_unvoided_transaction(simulator: 'Simulator', manager: 'HathorManager') -> Optional['DAGArtifacts']: from hathor.conf.get_settings import get_global_settings from hathor.simulator.utils import add_new_block, add_new_blocks, gen_new_tx @@ -117,7 +136,7 @@ def simulate_unvoided_transaction(simulator: 'Simulator', manager: 'HathorManage tx = gen_new_tx(manager, address, 1000) tx.weight = 19.0005 tx.update_hash() - assert 
manager.propagate_tx(tx, fails_silently=False) + assert manager.propagate_tx(tx) simulator.run(60) # A clone is created with a greater timestamp and a lower weight. It's a voided twin tx. @@ -125,7 +144,7 @@ def simulate_unvoided_transaction(simulator: 'Simulator', manager: 'HathorManage tx2.timestamp += 60 tx2.weight = 19 tx2.update_hash() - assert manager.propagate_tx(tx2, fails_silently=False) + assert manager.propagate_tx(tx2) simulator.run(60) # Only the second tx is voided @@ -140,20 +159,21 @@ def simulate_unvoided_transaction(simulator: 'Simulator', manager: 'HathorManage tx2.hash, ] block.update_hash() - assert manager.propagate_tx(block, fails_silently=False) + assert manager.propagate_tx(block) simulator.run(60) # The first tx gets voided and the second gets unvoided assert tx.get_metadata().voided_by assert not tx2.get_metadata().voided_by + return None -def simulate_invalid_mempool_transaction(simulator: 'Simulator', manager: 'HathorManager') -> None: - from hathor.conf.get_settings import get_global_settings + +def simulate_invalid_mempool_transaction(simulator: 'Simulator', manager: 'HathorManager') -> Optional['DAGArtifacts']: from hathor.simulator.utils import add_new_blocks, gen_new_tx from hathor.transaction import Block - settings = get_global_settings() + settings = manager._settings assert manager.wallet is not None address = manager.wallet.get_unused_address(mark_as_used=False) @@ -165,7 +185,7 @@ def simulate_invalid_mempool_transaction(simulator: 'Simulator', manager: 'Hatho tx = gen_new_tx(manager, address, 1000) tx.weight = manager.daa.minimum_tx_weight(tx) tx.update_hash() - assert manager.propagate_tx(tx, fails_silently=False) + assert manager.propagate_tx(tx) simulator.run(60) balance_per_address = manager.wallet.get_balance_per_address(settings.HATHOR_TOKEN_UID) assert balance_per_address[address] == 1000 @@ -176,7 +196,7 @@ def simulate_invalid_mempool_transaction(simulator: 'Simulator', manager: 'Hatho b0: Block = 
tb0.generate_mining_block(manager.rng, storage=manager.tx_storage) b0.weight = 10 manager.cpu_mining_service.resolve(b0) - assert manager.propagate_tx(b0, fails_silently=False) + assert manager.propagate_tx(b0) simulator.run(60) # the transaction should have been removed from the mempool and the storage after the re-org @@ -186,8 +206,10 @@ def simulate_invalid_mempool_transaction(simulator: 'Simulator', manager: 'Hatho balance_per_address = manager.wallet.get_balance_per_address(settings.HATHOR_TOKEN_UID) assert balance_per_address[address] == 6400 + return None + -def simulate_empty_script(simulator: 'Simulator', manager: 'HathorManager') -> None: +def simulate_empty_script(simulator: 'Simulator', manager: 'HathorManager') -> Optional['DAGArtifacts']: from hathor.conf.get_settings import get_global_settings from hathor.simulator.utils import add_new_blocks, gen_new_tx from hathor.transaction import TxInput, TxOutput @@ -204,7 +226,7 @@ def simulate_empty_script(simulator: 'Simulator', manager: 'HathorManager') -> N tx1.outputs[1].script = b'' tx1.weight = manager.daa.minimum_tx_weight(tx1) tx1.update_hash() - assert manager.propagate_tx(tx1, fails_silently=False) + assert manager.propagate_tx(tx1) simulator.run(60) tx2 = gen_new_tx(manager, address, 1000) @@ -212,14 +234,16 @@ def simulate_empty_script(simulator: 'Simulator', manager: 'HathorManager') -> N tx2.outputs = [TxOutput(value=1000, script=original_script)] tx2.weight = manager.daa.minimum_tx_weight(tx2) tx2.update_hash() - assert manager.propagate_tx(tx2, fails_silently=False) + assert manager.propagate_tx(tx2) simulator.run(60) add_new_blocks(manager, 1) simulator.run(60) + return None + -def simulate_custom_script(simulator: 'Simulator', manager: 'HathorManager') -> None: +def simulate_custom_script(simulator: 'Simulator', manager: 'HathorManager') -> Optional['DAGArtifacts']: from hathor.conf.get_settings import get_global_settings from hathor.simulator.utils import add_new_blocks, gen_new_tx from 
hathor.transaction import TxInput, TxOutput @@ -242,7 +266,7 @@ def simulate_custom_script(simulator: 'Simulator', manager: 'HathorManager') -> tx1.outputs[1].script = s.data tx1.weight = manager.daa.minimum_tx_weight(tx1) tx1.update_hash() - assert manager.propagate_tx(tx1, fails_silently=False) + assert manager.propagate_tx(tx1) simulator.run(60) tx2 = gen_new_tx(manager, address, 1000) @@ -250,8 +274,137 @@ def simulate_custom_script(simulator: 'Simulator', manager: 'HathorManager') -> tx2.outputs = [TxOutput(value=1000, script=original_script)] tx2.weight = manager.daa.minimum_tx_weight(tx2) tx2.update_hash() - assert manager.propagate_tx(tx2, fails_silently=False) + assert manager.propagate_tx(tx2) simulator.run(60) add_new_blocks(manager, 1) simulator.run(60) + + return None + + +def simulate_nc_events(simulator: 'Simulator', manager: 'HathorManager') -> Optional['DAGArtifacts']: + from hathor.nanocontracts import Blueprint, NCFail, public + from hathor.nanocontracts.catalog import NCBlueprintCatalog + from hathor.nanocontracts.context import Context + from hathor.nanocontracts.types import ContractId + + class TestEventsBlueprint1(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + self.syscall.emit_event(b'test event on initialize 1') + + @public + def fail(self, ctx: Context) -> None: + # This will not be emitted because the tx will fail. 
+ self.syscall.emit_event(b'test event on fail') + raise NCFail + + @public + def call_another(self, ctx: Context, contract_id: ContractId) -> None: + self.syscall.emit_event(b'test event on call_another') + self.syscall.call_public_method(contract_id, 'some_method', []) + + class TestEventsBlueprint2(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + self.syscall.emit_event(b'test event on initialize 2') + + @public + def some_method(self, ctx: Context) -> None: + self.syscall.emit_event(b'test event on some_method') + + blueprint1_id = b'\x11' * 32 + blueprint2_id = b'\x22' * 32 + manager.tx_storage.nc_catalog = NCBlueprintCatalog({ + blueprint1_id: TestEventsBlueprint1, + blueprint2_id: TestEventsBlueprint2, + }) + dag_builder = _create_dag_builder(manager) + artifacts = dag_builder.build_from_str(f''' + blockchain genesis b[1..3] + b1 < dummy + + # test simple event + nc1.nc_id = "{blueprint1_id.hex()}" + nc1.nc_method = initialize() + + nc2.nc_id = "{blueprint2_id.hex()}" + nc2.nc_method = initialize() + + # test events across contracts + nc3.nc_id = nc1 + nc3.nc_method = call_another(`nc2`) + + # test NC failure + nc4.nc_id = nc1 + nc4.nc_method = fail() + + nc1 <-- nc2 <-- nc3 <-- nc4 + nc2 <-- b2 + nc4 <-- b3 + nc4 < b2 + ''') + artifacts.propagate_with(manager, up_to='b2') + simulator.run(1) + artifacts.propagate_with(manager) + simulator.run(1) + + return artifacts + + +def simulate_nc_events_reorg(simulator: 'Simulator', manager: 'HathorManager') -> Optional['DAGArtifacts']: + from hathor.nanocontracts import Blueprint, public + from hathor.nanocontracts.catalog import NCBlueprintCatalog + from hathor.nanocontracts.context import Context + + class TestEventsBlueprint1(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + self.syscall.emit_event(b'test event on initialize 1') + + blueprint1_id = b'\x11' * 32 + manager.tx_storage.nc_catalog = NCBlueprintCatalog({blueprint1_id: TestEventsBlueprint1}) + dag_builder = 
_create_dag_builder(manager) + + # 2 reorgs happen, so nc1.initialize() gets executed 3 times, once in block a2 and twice in block b2 + artifacts = dag_builder.build_from_str(f''' + blockchain genesis b[1..4] + blockchain b1 a[2..3] + b1 < dummy + b2 < a2 < a3 < b3 < b4 + + nc1.nc_id = "{blueprint1_id.hex()}" + nc1.nc_method = initialize() + + nc1 <-- b2 + nc1 <-- a2 + ''') + + artifacts.propagate_with(manager) + simulator.run(1) + + return artifacts + + +def _create_dag_builder(manager: 'HathorManager') -> 'DAGBuilder': + from mnemonic import Mnemonic + + from hathor.dag_builder import DAGBuilder + from hathor.wallet import HDWallet + + seed = ('coral light army gather adapt blossom school alcohol coral light army gather ' + 'adapt blossom school alcohol coral light army gather adapt blossom school awesome') + + def create_random_hd_wallet() -> HDWallet: + m = Mnemonic('english') + words = m.to_mnemonic(manager.rng.randbytes(32)) + hd = HDWallet(words=words) + hd._manually_initialize() + return hd + + return DAGBuilder.from_manager( + manager=manager, + genesis_words=seed, + wallet_factory=create_random_hd_wallet, + ) diff --git a/hathor/cli/main.py b/hathor/cli/main.py index 41e77ad05..24c205e9f 100644 --- a/hathor/cli/main.py +++ b/hathor/cli/main.py @@ -32,6 +32,7 @@ def __init__(self) -> None: self.longest_cmd: int = 0 from . 
import ( + check_blueprint, db_export, db_import, generate_genesis, @@ -102,6 +103,7 @@ def __init__(self) -> None: self.add_cmd('dev', 'load-from-logs', load_from_logs, 'Load vertices as they are found in a log dump that was parsed with parse-logs') self.add_cmd('dev', 'parse-logs', parse_logs, 'Parse a log dump to use it with load-from-logs') + self.add_cmd('dev', 'check-blueprint', check_blueprint, 'Check for syntax validity of a Blueprint') def add_cmd(self, group: str, cmd: str, module: ModuleType, short_description: Optional[str] = None) -> None: self.command_list[cmd] = module diff --git a/hathor/cli/mining.py b/hathor/cli/mining.py index bb8655a82..a1245a469 100644 --- a/hathor/cli/mining.py +++ b/hathor/cli/mining.py @@ -137,13 +137,15 @@ def execute(args: Namespace) -> None: from hathor.conf.get_settings import get_global_settings from hathor.daa import DifficultyAdjustmentAlgorithm + from hathor.verification.verification_params import VerificationParams from hathor.verification.verification_service import VerificationService from hathor.verification.vertex_verifiers import VertexVerifiers settings = get_global_settings() daa = DifficultyAdjustmentAlgorithm(settings=settings) + verification_params = VerificationParams.default_for_mempool() verifiers = VertexVerifiers.create_defaults(settings=settings, daa=daa, feature_service=Mock()) verification_service = VerificationService(settings=settings, verifiers=verifiers) - verification_service.verify_without_storage(block) + verification_service.verify_without_storage(block, verification_params) except HathorError: print('[{}] ERROR: Block has not been pushed because it is not valid.'.format(datetime.datetime.now())) else: diff --git a/hathor/cli/openapi_files/openapi_base.json b/hathor/cli/openapi_files/openapi_base.json index 146f5e01d..46a0daac3 100644 --- a/hathor/cli/openapi_files/openapi_base.json +++ b/hathor/cli/openapi_files/openapi_base.json @@ -7,7 +7,7 @@ ], "info": { "title": "Hathor API", - 
"version": "0.63.1" + "version": "0.64.0" }, "consumes": [ "application/json" diff --git a/hathor/cli/openapi_files/register.py b/hathor/cli/openapi_files/register.py index 77dc29b87..0d355f684 100644 --- a/hathor/cli/openapi_files/register.py +++ b/hathor/cli/openapi_files/register.py @@ -37,6 +37,7 @@ def get_registered_resources() -> list[type[Resource]]: import hathor.event.resources.event # noqa: 401 import hathor.feature_activation.resources.feature # noqa: 401 import hathor.healthcheck.resources.healthcheck # noqa: 401 + import hathor.nanocontracts.resources # noqa: 401 import hathor.p2p.resources # noqa: 401 import hathor.profiler.resources # noqa: 401 import hathor.stratum.resources # noqa: 401 diff --git a/hathor/cli/quick_test.py b/hathor/cli/quick_test.py index 2bf6f16fe..0701ab7a9 100644 --- a/hathor/cli/quick_test.py +++ b/hathor/cli/quick_test.py @@ -12,14 +12,20 @@ # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations + import os from argparse import ArgumentParser -from typing import Any +from typing import TYPE_CHECKING, Any, Generator from structlog import get_logger +from twisted.internet.defer import inlineCallbacks from hathor.cli.run_node import RunNode +if TYPE_CHECKING: + from hathor.transaction import BaseTransaction, Block, Transaction + logger = get_logger() @@ -28,35 +34,24 @@ def __init__(self, vertex_handler, manager, n_blocks): self.log = logger.new() self._vertex_handler = vertex_handler self._manager = manager - self._n_blocks = n_blocks - - def on_new_vertex(self, *args: Any, **kwargs: Any) -> bool: - from hathor.transaction import Block - from hathor.transaction.base_transaction import GenericVertex - - msg: str | None = None - res = self._vertex_handler.on_new_vertex(*args, **kwargs) - - if self._n_blocks is None: - should_quit = res - msg = 'added a tx' - else: - vertex = args[0] - should_quit = False - assert isinstance(vertex, GenericVertex) 
- - if isinstance(vertex, Block): - should_quit = vertex.get_height() >= self._n_blocks - msg = f'reached height {vertex.get_height()}' - - if should_quit: - assert msg is not None - self.log.info(f'successfully {msg}, exit now') + self._n_blocks = n_blocks or 0 + + @inlineCallbacks + def on_new_block(self, block: Block, *args: Any, **kwargs: Any) -> Generator[Any, Any, bool]: + res = yield self._vertex_handler.on_new_block(block, *args, **kwargs) + if block.get_height() >= self._n_blocks: + self.log.info(f'successfully reached height {block.get_height()}, exit now') self._manager.connections.disconnect_all_peers(force=True) self._manager.reactor.fireSystemEvent('shutdown') os._exit(0) return res + def on_new_mempool_transaction(self, tx: Transaction) -> bool: + return self._vertex_handler.on_new_mempool_transaction(tx) + + def on_new_relayed_vertex(self, vertex: BaseTransaction, *args: Any, **kwargs: Any) -> bool: + return self._vertex_handler.on_new_mempool_transaction(vertex, *args, **kwargs) + class QuickTest(RunNode): @classmethod diff --git a/hathor/cli/run_node.py b/hathor/cli/run_node.py index 3b4fb9e33..7f2618624 100644 --- a/hathor/cli/run_node.py +++ b/hathor/cli/run_node.py @@ -69,6 +69,7 @@ def create_parser(cls) -> ArgumentParser: """ from hathor.cli.util import create_parser from hathor.feature_activation.feature import Feature + from hathor.nanocontracts.nc_exec_logs import NCLogConfig parser = create_parser(prefix=cls.env_vars_prefix) parser.add_argument('--hostname', help='Hostname used to be accessed by other peers') @@ -79,6 +80,8 @@ def create_parser(cls) -> ArgumentParser: netargs = parser.add_mutually_exclusive_group() netargs.add_argument('--nano-testnet', action='store_true', help='Connect to Hathor nano-testnet') netargs.add_argument('--testnet', action='store_true', help='Connect to Hathor testnet') + netargs.add_argument('--testnet-hotel', action='store_true', help='Connect to Hathor testnet-hotel') + netargs.add_argument('--localnet', 
action='store_true', help='Create a localnet with default configuration.') parser.add_argument('--test-mode-tx-weight', action='store_true', help='Reduces tx weight to 1 for testing purposes') @@ -95,12 +98,12 @@ def create_parser(cls) -> ArgumentParser: parser.add_argument('--x-status-ipv6-interface', help='IPv6 interface to bind the status server') parser.add_argument('--stratum', type=int, help='Port to run stratum server') parser.add_argument('--x-stratum-ipv6-interface', help='IPv6 interface to bind the stratum server') - parser.add_argument('--data', help='Data directory') - storage = parser.add_mutually_exclusive_group() - storage.add_argument('--rocksdb-storage', action='store_true', help='Use RocksDB storage backend (default)') - storage.add_argument('--memory-storage', action='store_true', help='Do not use a persistent storage') - parser.add_argument('--memory-indexes', action='store_true', - help='Use memory indexes when using RocksDB storage (startup is significantly slower)') + data_group = parser.add_mutually_exclusive_group() + data_group.add_argument('--data', help='Data directory') + data_group.add_argument('--temp-data', action='store_true', + help='Automatically create storage in a temporary directory') + parser.add_argument('--memory-storage', action='store_true', help=SUPPRESS) # deprecated + parser.add_argument('--memory-indexes', action='store_true', help=SUPPRESS) # deprecated parser.add_argument('--rocksdb-cache', type=int, help='RocksDB block-table cache size (bytes)', default=None) parser.add_argument('--wallet', help='Set wallet type. 
Options are hd (Hierarchical Deterministic) or keypair', default=None) @@ -114,6 +117,8 @@ def create_parser(cls) -> ArgumentParser: help='Create an index of transactions by address and allow searching queries') parser.add_argument('--utxo-index', action='store_true', help='Create an index of UTXOs by token/address/amount and allow searching queries') + parser.add_argument('--nc-indexes', action='store_true', + help='Enable indexes related to nano contracts') parser.add_argument('--prometheus', action='store_true', help='Send metric data to Prometheus') parser.add_argument('--prometheus-prefix', default='', help='A prefix that will be added in all Prometheus metrics') @@ -124,8 +129,6 @@ def create_parser(cls) -> ArgumentParser: parser.add_argument('--cache-interval', type=int, help='Cache flush interval') parser.add_argument('--recursion-limit', type=int, help='Set python recursion limit') parser.add_argument('--allow-mining-without-peers', action='store_true', help='Allow mining without peers') - fvargs = parser.add_mutually_exclusive_group() - fvargs.add_argument('--x-full-verification', action='store_true', help=SUPPRESS) # deprecated parser.add_argument('--procname-prefix', help='Add a prefix to the process name', default='') parser.add_argument('--allow-non-standard-script', action='store_true', help='Accept non-standard scripts on ' '/push-tx API') @@ -134,16 +137,14 @@ def create_parser(cls) -> ArgumentParser: parser.add_argument('--sentry-dsn', help='Sentry DSN') parser.add_argument('--enable-debug-api', action='store_true', help='Enable _debug/* endpoints') parser.add_argument('--enable-crash-api', action='store_true', help='Enable _crash/* endpoints') - sync_args = parser.add_mutually_exclusive_group() - sync_args.add_argument('--sync-bridge', action='store_true', help=SUPPRESS) # deprecated - sync_args.add_argument('--sync-v1-only', action='store_true', help=SUPPRESS) # deprecated - sync_args.add_argument('--sync-v2-only', action='store_true', 
help=SUPPRESS) # deprecated - sync_args.add_argument('--x-remove-sync-v1', action='store_true', help=SUPPRESS) # deprecated - sync_args.add_argument('--x-sync-v1-only', action='store_true', help=SUPPRESS) # deprecated - sync_args.add_argument('--x-sync-v2-only', action='store_true', help=SUPPRESS) # deprecated - sync_args.add_argument('--x-sync-bridge', action='store_true', help=SUPPRESS) # deprecated + parser.add_argument('--sync-bridge', action='store_true', help=SUPPRESS) # deprecated + parser.add_argument('--sync-v1-only', action='store_true', help=SUPPRESS) # deprecated + parser.add_argument('--sync-v2-only', action='store_true', help=SUPPRESS) # deprecated + parser.add_argument('--x-remove-sync-v1', action='store_true', help=SUPPRESS) # deprecated + parser.add_argument('--x-sync-v1-only', action='store_true', help=SUPPRESS) # deprecated + parser.add_argument('--x-sync-v2-only', action='store_true', help=SUPPRESS) # deprecated + parser.add_argument('--x-sync-bridge', action='store_true', help=SUPPRESS) # deprecated parser.add_argument('--x-localhost-only', action='store_true', help='Only connect to peers on localhost') - parser.add_argument('--x-rocksdb-indexes', action='store_true', help=SUPPRESS) parser.add_argument('--x-enable-event-queue', action='store_true', help='Deprecated: use --enable-event-queue instead.') parser.add_argument('--enable-event-queue', action='store_true', help='Enable event queue mechanism') @@ -168,6 +169,9 @@ def create_parser(cls) -> ArgumentParser: help='Enables listening on IPv6 interface and connecting to IPv6 peers') parser.add_argument('--x-disable-ipv4', action='store_true', help='Disables connecting to IPv4 peers') + possible_nc_exec_logs = [config.value for config in NCLogConfig] + parser.add_argument('--nc-exec-logs', default=NCLogConfig.NONE, choices=possible_nc_exec_logs, + help=f'Enable saving Nano Contracts execution logs. 
One of {possible_nc_exec_logs}') return parser def prepare(self, *, register_resources: bool = True) -> None: @@ -254,7 +258,7 @@ def prepare(self, *, register_resources: bool = True) -> None: tx_storage=self.manager.tx_storage, indexes=self.manager.tx_storage.indexes, wallet=self.manager.wallet, - rocksdb_storage=getattr(builder, 'rocksdb_storage', None), + rocksdb_storage=builder.rocksdb_storage, stratum_factory=self.manager.stratum_factory, feature_service=self.manager.vertex_handler._feature_service, bit_signaling_service=self.manager._bit_signaling_service, @@ -459,9 +463,9 @@ def check_unsafe_arguments(self) -> None: def check_python_version(self) -> None: # comments to help grep's - MIN_VER = (3, 10) # Python-3.10 - MIN_STABLE = (3, 10) # Python-3.10 - RECOMMENDED_VER = (3, 10) # Python-3.10 + MIN_VER = (3, 11) # Python-3.11 + MIN_STABLE = (3, 12) # Python-3.12 + RECOMMENDED_VER = (3, 12) # Python-3.12 cur = sys.version_info cur_pretty = '.'.join(map(str, cur)) min_pretty = '.'.join(map(str, MIN_VER)) @@ -491,7 +495,12 @@ def check_python_version(self) -> None: ])) def __init__(self, *, argv=None): - from hathor.conf import NANO_TESTNET_SETTINGS_FILEPATH, TESTNET_SETTINGS_FILEPATH + from hathor.conf import ( + LOCALNET_SETTINGS_FILEPATH, + NANO_TESTNET_SETTINGS_FILEPATH, + TESTNET_HOTEL_SETTINGS_FILEPATH, + TESTNET_SETTINGS_FILEPATH, + ) from hathor.conf.get_settings import get_global_settings self.log = logger.new() @@ -508,8 +517,12 @@ def __init__(self, *, argv=None): os.environ['HATHOR_CONFIG_YAML'] = self._args.config_yaml elif self._args.testnet: os.environ['HATHOR_CONFIG_YAML'] = TESTNET_SETTINGS_FILEPATH + elif self._args.testnet_hotel: + os.environ['HATHOR_CONFIG_YAML'] = TESTNET_HOTEL_SETTINGS_FILEPATH elif self._args.nano_testnet: os.environ['HATHOR_CONFIG_YAML'] = NANO_TESTNET_SETTINGS_FILEPATH + elif self._args.localnet: + os.environ['HATHOR_CONFIG_YAML'] = LOCALNET_SETTINGS_FILEPATH try: get_global_settings() diff --git 
a/hathor/cli/run_node_args.py b/hathor/cli/run_node_args.py index 6f076253f..96470f518 100644 --- a/hathor/cli/run_node_args.py +++ b/hathor/cli/run_node_args.py @@ -17,6 +17,7 @@ from pydantic import Extra from hathor.feature_activation.feature import Feature # skip-cli-import-custom-check +from hathor.nanocontracts.nc_exec_logs import NCLogConfig # skip-cli-import-custom-check from hathor.utils.pydantic import BaseModel # skip-cli-import-custom-check @@ -29,6 +30,7 @@ class RunNodeArgs(BaseModel, extra=Extra.allow): auto_hostname: bool unsafe_mode: Optional[str] testnet: bool + testnet_hotel: bool test_mode_tx_weight: bool dns: Optional[str] peer: Optional[str] @@ -40,9 +42,9 @@ class RunNodeArgs(BaseModel, extra=Extra.allow): stratum: Optional[int] x_stratum_ipv6_interface: Optional[str] data: Optional[str] - rocksdb_storage: bool memory_storage: bool memory_indexes: bool + temp_data: bool rocksdb_cache: Optional[int] wallet: Optional[str] wallet_enable_api: bool @@ -59,7 +61,6 @@ class RunNodeArgs(BaseModel, extra=Extra.allow): cache_interval: Optional[int] recursion_limit: Optional[int] allow_mining_without_peers: bool - x_full_verification: bool procname_prefix: str allow_non_standard_script: bool max_output_script_size: Optional[int] @@ -74,7 +75,6 @@ class RunNodeArgs(BaseModel, extra=Extra.allow): sync_v1_only: bool sync_v2_only: bool x_localhost_only: bool - x_rocksdb_indexes: bool x_enable_event_queue: bool enable_event_queue: bool peer_id_blacklist: list[str] @@ -88,3 +88,6 @@ class RunNodeArgs(BaseModel, extra=Extra.allow): disable_ws_history_streaming: bool x_enable_ipv6: bool x_disable_ipv4: bool + localnet: bool + nc_indexes: bool + nc_exec_logs: NCLogConfig diff --git a/hathor/cli/side_dag.py b/hathor/cli/side_dag.py index 20132f2d5..6658b8f08 100644 --- a/hathor/cli/side_dag.py +++ b/hathor/cli/side_dag.py @@ -82,10 +82,10 @@ def main(capture_stdout: bool) -> None: $ python -m hathor run_node_with_side_dag --testnet --procname-prefix testnet- - 
--memory-storage + --temp-data --side-dag-config-yaml ./my-side-dag.yml --side-dag-procname-prefix my-side-dag- - --side-dag-memory-storage + --side-dag-temp-data --json-logs both ``` diff --git a/hathor/conf/__init__.py b/hathor/conf/__init__.py index 36fecbae5..07dab18ab 100644 --- a/hathor/conf/__init__.py +++ b/hathor/conf/__init__.py @@ -20,13 +20,17 @@ MAINNET_SETTINGS_FILEPATH = str(parent_dir / 'mainnet.yml') TESTNET_SETTINGS_FILEPATH = str(parent_dir / 'testnet.yml') +TESTNET_HOTEL_SETTINGS_FILEPATH = str(parent_dir / 'testnet_hotel.yml') NANO_TESTNET_SETTINGS_FILEPATH = str(parent_dir / 'nano_testnet.yml') +LOCALNET_SETTINGS_FILEPATH = str(parent_dir / 'localnet.yml') UNITTESTS_SETTINGS_FILEPATH = str(parent_dir / 'unittests.yml') __all__ = [ 'MAINNET_SETTINGS_FILEPATH', 'TESTNET_SETTINGS_FILEPATH', + 'TESTNET_HOTEL_SETTINGS_FILEPATH', 'NANO_TESTNET_SETTINGS_FILEPATH', + 'LOCALNET_SETTINGS_FILEPATH', 'UNITTESTS_SETTINGS_FILEPATH', 'HathorSettings', ] diff --git a/hathor/conf/localnet.yml b/hathor/conf/localnet.yml new file mode 100644 index 000000000..d175faeaf --- /dev/null +++ b/hathor/conf/localnet.yml @@ -0,0 +1,27 @@ +P2PKH_VERSION_BYTE: x49 +MULTISIG_VERSION_BYTE: x87 +NETWORK_NAME: local-privatenet +BOOTSTRAP_DNS: [] + +# Ledger genesis +GENESIS_OUTPUT_SCRIPT: 76a91466665b27f7dbc4c8c089d2f686c170c74d66f0b588ac +GENESIS_BLOCK_TIMESTAMP: 1643902665 +GENESIS_BLOCK_NONCE: 4784939 +GENESIS_BLOCK_HASH: 00000334a21fbb58b4db8d7ff282d018e03e2977abd3004cf378fb1d677c3967 +GENESIS_TX1_NONCE: 0 +GENESIS_TX1_HASH: 54165cef1fd4cf2240d702b8383c307c822c16ca407f78014bdefa189a7571c2 +GENESIS_TX2_NONCE: 0 +GENESIS_TX2_HASH: 039906854ce6309b3180945f2a23deb9edff369753f7082e19053f5ac11bfbae + +# Genesis wallet: +# avocado spot town typical traffic vault danger century property shallow divorce festival +# spend attack anchor afford rotate green audit adjust fade wagon depart level + +MIN_TX_WEIGHT_K: 0 +MIN_TX_WEIGHT_COEFFICIENT: 0 +MIN_TX_WEIGHT: 1 
+REWARD_SPEND_MIN_BLOCKS: 1 + +CHECKPOINTS: [] + +extends: testnet.yml diff --git a/hathor/conf/mainnet.py b/hathor/conf/mainnet.py index c5614e34b..937328a75 100644 --- a/hathor/conf/mainnet.py +++ b/hathor/conf/mainnet.py @@ -227,6 +227,18 @@ version='0.60.0', signal_support_by_default=True, ), + Feature.COUNT_CHECKDATASIG_OP: Criteria( + bit=0, + # N = 5_765_760 + # Expected to be reached around Tuesday, 2025-08-12 17:39:56 GMT + # Right now the best block is 5_748_286 at Wednesday, 2025-08-06 16:02:56 GMT + start_height=5_765_760, + timeout_height=5_886_720, # N + 6 * 20160 (6 weeks after the start) + minimum_activation_height=0, + lock_in_on_timeout=False, + version='0.64.0', + signal_support_by_default=True, + ), } ) ) diff --git a/hathor/conf/mainnet.yml b/hathor/conf/mainnet.yml index d05a07e08..1ddef948f 100644 --- a/hathor/conf/mainnet.yml +++ b/hathor/conf/mainnet.yml @@ -207,3 +207,15 @@ FEATURE_ACTIVATION: lock_in_on_timeout: false version: 0.60.0 signal_support_by_default: true + + COUNT_CHECKDATASIG_OP: + bit: 0 + # N = 5_765_760 + # Expected to be reached around Tuesday, 2025-08-12 17:39:56 GMT + # Right now the best block is 5_748_286 at Wednesday, 2025-08-06 16:02:56 GMT + start_height: 5_765_760 + timeout_height: 5_886_720 # N + 6 * 20160 (6 weeks after the start) + minimum_activation_height: 0 + lock_in_on_timeout: false + version: 0.64.0 + signal_support_by_default: true diff --git a/hathor/conf/nano_testnet.py b/hathor/conf/nano_testnet.py deleted file mode 100644 index 32f7ab7c9..000000000 --- a/hathor/conf/nano_testnet.py +++ /dev/null @@ -1,38 +0,0 @@ -# Copyright 2022 Hathor Labs -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from hathor.conf.settings import HathorSettings - -SETTINGS = HathorSettings( - P2PKH_VERSION_BYTE=b'\x49', - MULTISIG_VERSION_BYTE=b'\x87', - NETWORK_NAME='nano-testnet-alpha', - BOOTSTRAP_DNS=[], - # Genesis stuff - GENESIS_OUTPUT_SCRIPT=bytes.fromhex('76a91478e804bf8aa68332c6c1ada274ac598178b972bf88ac'), - GENESIS_BLOCK_TIMESTAMP=1677601898, - GENESIS_BLOCK_NONCE=7881594, - GENESIS_BLOCK_HASH=bytes.fromhex('000003472f6a17c2199e24c481a4326c217d07376acd9598651f8413c008554d'), - GENESIS_TX1_NONCE=110, - GENESIS_TX1_HASH=bytes.fromhex('0008f0e9dbe6e4bbc3a85fce7494fee70011b9c7e72f5276daa2a235355ac013'), - GENESIS_TX2_NONCE=180, - GENESIS_TX2_HASH=bytes.fromhex('008d81d9d58a43fd9649f33483d804a4417247b4d4e4e01d64406c4177fee0c2'), - # tx weight parameters. 
With these settings, tx weight is always 8 - MIN_TX_WEIGHT_K=0, - MIN_TX_WEIGHT_COEFFICIENT=0, - MIN_TX_WEIGHT=8, - CHECKPOINTS=[], - ENABLE_NANO_CONTRACTS=True, - BLUEPRINTS={}, -) diff --git a/hathor/conf/nano_testnet.yml b/hathor/conf/nano_testnet.yml index ece2b1b87..979ba340b 100644 --- a/hathor/conf/nano_testnet.yml +++ b/hathor/conf/nano_testnet.yml @@ -1,20 +1,24 @@ P2PKH_VERSION_BYTE: x49 MULTISIG_VERSION_BYTE: x87 -NETWORK_NAME: nano-testnet-alpha -BOOTSTRAP_DNS: [] +NETWORK_NAME: nano-testnet-bravo +BOOTSTRAP_DNS: + - bravo.nano-testnet.hathor.network # Genesis stuff GENESIS_OUTPUT_SCRIPT: 76a91478e804bf8aa68332c6c1ada274ac598178b972bf88ac -GENESIS_BLOCK_TIMESTAMP: 1677601898 -GENESIS_BLOCK_NONCE: 7881594 -GENESIS_BLOCK_HASH: 000003472f6a17c2199e24c481a4326c217d07376acd9598651f8413c008554d -GENESIS_TX1_NONCE: 110 -GENESIS_TX1_HASH: 0008f0e9dbe6e4bbc3a85fce7494fee70011b9c7e72f5276daa2a235355ac013 -GENESIS_TX2_NONCE: 180 -GENESIS_TX2_HASH: 008d81d9d58a43fd9649f33483d804a4417247b4d4e4e01d64406c4177fee0c2 +GENESIS_BLOCK_TIMESTAMP: 1750978888 +GENESIS_BLOCK_NONCE: 896384 +GENESIS_BLOCK_HASH: 000003076f294c2c93d8cc48f68b6c93087361ca78c54faa91daaffde84ba916 +GENESIS_TX1_NONCE: 16 +GENESIS_TX1_HASH: 001c9a3e8810bc3389b0fd3cfb118e9190f95bd5bf313a9575a4663d0a80af2d +GENESIS_TX2_NONCE: 154 +GENESIS_TX2_HASH: 002fecfce5e78047f9b967a27b1b2436c3fea17e24c770d59421bacdcadda0ea # tx weight parameters. With these settings tx weight is always 8 MIN_TX_WEIGHT_K: 0 MIN_TX_WEIGHT_COEFFICIENT: 0 MIN_TX_WEIGHT: 8 -ENABLE_NANO_CONTRACTS: true +ENABLE_NANO_CONTRACTS: enabled +NC_ON_CHAIN_BLUEPRINT_ALLOWED_ADDRESSES: + - WWFiNeWAFSmgtjm4ht2MydwS5GY3kMJsEK + - WQFDxic8xWWnMLL4aE5abY2XRKPNvGhtjY diff --git a/hathor/conf/settings.py b/hathor/conf/settings.py index 654ca398f..c42bfad26 100644 --- a/hathor/conf/settings.py +++ b/hathor/conf/settings.py @@ -13,6 +13,7 @@ # limitations under the License. 
import os +from enum import StrEnum, auto, unique from math import log from pathlib import Path from typing import Any, NamedTuple, Optional, Union @@ -33,6 +34,31 @@ HATHOR_TOKEN_UID = b'\x00' +@unique +class NanoContractsSetting(StrEnum): + """Enum to configure the state of the Nano Contracts feature.""" + + # Completely disabled. + DISABLED = auto() + + # Completely enabled since network creation. + ENABLED = auto() + + # Enabled through Feature Activation. + FEATURE_ACTIVATION = auto() + + def __bool__(self) -> bool: + """ + >>> bool(NanoContractsSetting.DISABLED) + False + >>> bool(NanoContractsSetting.ENABLED) + True + >>> bool(NanoContractsSetting.FEATURE_ACTIVATION) + True + """ + return self in (NanoContractsSetting.ENABLED, NanoContractsSetting.FEATURE_ACTIVATION) + + class HathorSettings(NamedTuple): # Version byte of the address in P2PKH P2PKH_VERSION_BYTE: bytes @@ -359,10 +385,6 @@ def GENESIS_TX2_TIMESTAMP(self) -> int: # Amount in which tx min weight reaches the middle point between the minimum and maximum weight MIN_TX_WEIGHT_K: int = 100 - # When the node is being initialized (with a full verification) we don't verify - # the difficulty of all blocks, we execute the validation every N blocks only - VERIFY_WEIGHT_EVERY_N_BLOCKS: int = 1000 - # Capabilities CAPABILITY_WHITELIST: str = 'whitelist' CAPABILITY_SYNC_VERSION: str = 'sync-version' @@ -441,10 +463,7 @@ def GENESIS_TX2_TIMESTAMP(self) -> int: MAX_UNVERIFIED_PEERS_PER_CONN: int = 100 # Used to enable nano contracts. - # - # This should NEVER be enabled for mainnet and testnet, since both networks will - # activate Nano Contracts through the Feature Activation. - ENABLE_NANO_CONTRACTS: bool = False + ENABLE_NANO_CONTRACTS: NanoContractsSetting = NanoContractsSetting.DISABLED # List of enabled blueprints. 
BLUEPRINTS: dict[bytes, 'str'] = {} @@ -456,6 +475,24 @@ def GENESIS_TX2_TIMESTAMP(self) -> int: NATIVE_TOKEN_NAME: str = 'Hathor' NATIVE_TOKEN_SYMBOL: str = 'HTR' + # The pubkeys allowed to create on-chain-blueprints in the network + # XXX: in the future this restriction will be lifted, possibly through a feature activation + NC_ON_CHAIN_BLUEPRINT_RESTRICTED: bool = True + NC_ON_CHAIN_BLUEPRINT_ALLOWED_ADDRESSES: list[str] = [] + + # Max length in bytes allowed for on-chain blueprint code after decompression, 240KB (not KiB) + NC_ON_CHAIN_BLUEPRINT_CODE_MAX_SIZE_UNCOMPRESSED: int = 240_000 + + # Max length in bytes allowed for on-chain blueprint code inside the transaction, 24KB (not KiB) + NC_ON_CHAIN_BLUEPRINT_CODE_MAX_SIZE_COMPRESSED: int = 24_000 + + # TODO: align this with a realistic value later + # fuel units are arbitrary but it's roughly the number of opcodes, memory_limit is in bytes + NC_INITIAL_FUEL_TO_LOAD_BLUEPRINT_MODULE: int = 100_000 # 100K opcodes + NC_MEMORY_LIMIT_TO_LOAD_BLUEPRINT_MODULE: int = 100 * 1024 * 1024 # 100MiB + NC_INITIAL_FUEL_TO_CALL_METHOD: int = 1_000_000 # 1M opcodes + NC_MEMORY_LIMIT_TO_CALL_METHOD: int = 1024 * 1024 * 1024 # 1GiB + @classmethod def from_yaml(cls, *, filepath: str) -> 'HathorSettings': """Takes a filepath to a yaml file and returns a validated HathorSettings instance.""" diff --git a/hathor/conf/testnet.py b/hathor/conf/testnet.py index dc6a42907..382a66196 100644 --- a/hathor/conf/testnet.py +++ b/hathor/conf/testnet.py @@ -109,7 +109,20 @@ lock_in_on_timeout=False, version='0.63.0', signal_support_by_default=True, - ) + ), + + Feature.COUNT_CHECKDATASIG_OP: Criteria( + bit=0, + # N = 5_120_640 + # Expected to be reached around Wednesday, 2025-08-13 03:09:44 GMT + # Right now the best block is 5_102_018 at Wednesday, 2025-08-06 15:58:44 GMT + start_height=5_120_640, # N + timeout_height=5_241_600, # N + 6 * 20160 (6 weeks after the start) + minimum_activation_height=0, + lock_in_on_timeout=False, + 
version='0.64.0', + signal_support_by_default=True, + ), } ) ) diff --git a/hathor/conf/testnet.yml b/hathor/conf/testnet.yml index 7a4fb0452..9f9104475 100644 --- a/hathor/conf/testnet.yml +++ b/hathor/conf/testnet.yml @@ -82,7 +82,7 @@ FEATURE_ACTIVATION: NOP_FEATURE_1: bit: 0 # N = 4_495_680 - # Expected to be reached around Tuesday, 2025-01-06. + # Expected to be reached around Monday, 2025-01-06. # Right now the best block is 4_489_259 on testnet (2025-01-03). start_height: 4_495_680 # N timeout_height: 4_576_320 # N + 4 * 20160 (4 weeks after the start) @@ -90,3 +90,15 @@ FEATURE_ACTIVATION: lock_in_on_timeout: false version: 0.63.0 signal_support_by_default: true + + COUNT_CHECKDATASIG_OP: + bit: 0 + # N = 5_120_640 + # Expected to be reached around Wednesday, 2025-08-13 03:09:44 GMT + # Right now the best block is 5_102_018 at Wednesday, 2025-08-06 15:58:44 GMT + start_height: 5_120_640 + timeout_height: 5_241_600 # N + 6 * 20160 (6 weeks after the start) + minimum_activation_height: 0 + lock_in_on_timeout: false + version: 0.64.0 + signal_support_by_default: true diff --git a/hathor/conf/testnet_hotel.yml b/hathor/conf/testnet_hotel.yml new file mode 100644 index 000000000..160db7d9d --- /dev/null +++ b/hathor/conf/testnet_hotel.yml @@ -0,0 +1,121 @@ +P2PKH_VERSION_BYTE: x49 +MULTISIG_VERSION_BYTE: x87 +NETWORK_NAME: testnet-hotel +BOOTSTRAP_DNS: + - hotel.testnet.hathor.network + +# Genesis stuff +GENESIS_OUTPUT_SCRIPT: 76a914a584cf48b161e4a49223ed220df30037ab740e0088ac +GENESIS_BLOCK_TIMESTAMP: 1577836800 +GENESIS_BLOCK_NONCE: 826272 +GENESIS_BLOCK_HASH: 0000033139d08176d1051fb3a272c3610457f0c7f686afbe0afe3d37f966db85 +GENESIS_TX1_NONCE: 190 +GENESIS_TX1_HASH: 00e161a6b0bee1781ea9300680913fb76fd0fac4acab527cd9626cc1514abdc9 +GENESIS_TX2_NONCE: 115 +GENESIS_TX2_HASH: 00975897028ceb037307327c953f5e7ad4d3f42402d71bd3d11ecb63ac39f01a + +# tx weight parameters. 
With these settings tx weight is always 8 +MIN_TX_WEIGHT_K: 0 +MIN_TX_WEIGHT_COEFFICIENT: 0 +MIN_TX_WEIGHT: 8 +CHECKPOINTS: + 100_000: 0000007ece4c7830169f360ed11c51b776e1b72bf0060e6e5b325ca8be474ac5 + 200_000: 00000113ecd4b666116abf3d3f05ad509d903d6b456a1e8c35e46a9e426af11a + 300_000: 000000e42df13e4e7490cee98f303cb3b0ca33f362af180c5f7df740c98699d9 + 400_000: 000000e9a748b34fc4d662d88bb36ef2a033ba129960924208be14eccdac1a65 + 500_000: 000000b5c4572d7b85e585849540ece44b73948c5cdbc6f17a9a3a77fbd0f29a + 600_000: 000000f6743ba3d67e51d7adc21821b8263726ce3bc86010d5e1a905bf2531dc + 700_000: 0000008fda01c9e5fd6f99a5461e6dbf1039cba38cc8d0fc738a097d71caa968 + 800_000: 000000397af32fcc4eeb6985d96326c1ff4644792631872a00394688b1782af5 + 900_000: 00000097ae405036614f4335ad0e631df8fc5f7434e82c3421627e2fea4e1830 + 1_000_000: 000000145ba662cdee0d72034658f93a0a3a4568d5ba5083ff09013ca1e6556c + 1_100_000: 000000404e6ff6a23695a6ffe712ce1c4efc02e75bbc11c3129f4c2377b07743 + 1_200_000: 0000003be5fae5bb2c9ceaed589d172bcd9e74ca6c8d7d2ca06567f65cea7c9b + 1_300_000: 0000000000007d39de6e781c377bc202213b0b5b60db14c13d0b16e06d6fd5ac + 1_400_000: 000000000df9cb786c68a643a52a67c22ab54e8b8e41cbe9b761133f6c8abbfe + 1_500_000: 000000000c3591805f4748480b59ac1788f754fc004930985a487580e2b5de8f + 1_600_000: 00000000060adfdfd7d488d4d510b5779cf35a3c50df7bcff941fbb6957be4d2 + 1_700_000: 0000000007afc04aebad15b14fcd93c1b5193dc503b190433f55be8c218b6d12 + 1_800_000: 00000000126f16af2ba934a60cf8f2da32d3ed2688c56ce8ff477e483a3ffc42 + 1_900_000: 0000000005d2a2ba2231663187b460396189af0ffca7b2e93fccc85cde04cbdc + 2_000_000: 000000000009a8451ff2d5ec54951d717da2766aedb3131485466cc993879ee1 + 2_100_000: 0000000009f961804cd7f43da05f08a94a2fa09f82c7d605afc5982ab242a7e4 + 2_200_000: 0000000002e260b970846a89c23e754a763e7c5f1578b6ec4e67bdb94c667997 + 2_300_000: 0000000006e0894c8f7fd029fe446a42433875647759183ba3fbb0ff0b7ceb64 + 2_400_000: 0000000011ab28f3be17e3a098307fa73750cc8d74f1f60cfb44b524a60c94ec + 2_500_000: 
00000000045d2bcc10c896bfc7d1f28788e3530a81f50ee096f386eec772634f + 2_600_000: 000000000766b9ac25e2ece5685effa834e61284e38f368c841210606bb1fdfc + 2_700_000: 0000000005d0ee31d0f47f6ff9aa570b9f25b9d44a8a59cea0e0f8a1729b9c90 + 2_800_000: 000000000a5bd4f266fa13d2c0594cabf6465758f7f5814bde626032706b81e5 + 2_900_000: 000000000b11b0a09ff0d7c2cfd9228f31c53008e700532e439d3a3d9c63fb8e + 3_000_000: 00000000013289569569cd51580183a2c870dfe5a395adaa00ae66fefe51af3d + 3_100_000: 00000000170c55e6ec207400bfc42786c1e0c32fe045a1d815f930daf2bf3020 + 3_200_000: 00000000149986cb99c202136bd388fb2a7fcba4bdfd6ac049069ac5e08a587f + 3_300_000: 000000000e16f87ac7133639cb52a99574944b8457939396e7faf1615fcfdb0f + 3_400_000: 000000000f551f6224a459904436072f5ff10fd3db17f2d7e25b1ef9b149c121 + 3_500_000: 0000000006572b8cf41130e88776adf8583e970905df2afe593ca31c91ab0c4c + 3_600_000: 000000000215fcc7018cc31bbfb943ca43c6297529fa008bf34665f3ac64d340 + 3_700_000: 000000000dbf5e8ab4f90f2187db6db429c9d0cb8169051ce8a9e79b810509d7 + 3_800_000: 00000000030411ec36c7f5386a94e147460d86592f85459e0eadd5cd0e3da7b4 + 3_900_000: 000000000bc2c7078a3c59d878196f1491aad45a0df9d312909d85482ac8d714 + 4_000_000: 000000000eba0dae3ec27cf5596ef49731744edebadb9fbae42160b6aa2e2461 + 4_100_000: 00000000052aa77fd8db71d5306257f9fe068c3401d95b17fcedcccfc9b76c82 + 4_200_000: 00000000010a8dae043c84fcb2cef6a2b42a28279b95af20ab5a098acf2a3565 + 4_300_000: 000000000019da781ef75fa5f59c5537d8ed18b64c589c3e036109cfb1d84f7d + +FEATURE_ACTIVATION: + default_threshold: 15_120 # 15120 = 75% of evaluation_interval (20160) + features: + INCREASE_MAX_MERKLE_PATH_LENGTH: + bit: 3 + # N = 3_548_160 + # Expected to be reached around Sunday, 2024-02-04. + # Right now the best block is 3_521_000 on testnet (2024-01-26). 
+ start_height: 3_548_160 + timeout_height: 3_588_480 + minimum_activation_height: 0 + lock_in_on_timeout: false + version: 0.59.0 + signal_support_by_default: true + + # NOP feature to test Feature Activation for Transactions + NOP_FEATURE_1: + bit: 0 + # N = 4_495_680 + # Expected to be reached around Monday, 2025-01-06. + # Right now the best block is 4_489_259 on testnet (2025-01-03). + start_height: 4_495_680 # N + timeout_height: 4_576_320 # N + 4 * 20160 (4 weeks after the start) + minimum_activation_height: 0 + lock_in_on_timeout: false + version: 0.63.0 + signal_support_by_default: true + + NANO_CONTRACTS: + bit: 0 + # N = 5_040_000 + # Expected to be reached around Friday, 2025-07-18. + # Right now the best block is 5_033_266 on testnet-hotel (2025-07-16). + start_height: 5_040_000 # N + timeout_height: 5_080_320 # N + 2 * 20160 (2 weeks after the start) + minimum_activation_height: 0 + lock_in_on_timeout: false + version: 0.64.0 + signal_support_by_default: true + + COUNT_CHECKDATASIG_OP: + bit: 1 + # N = 5_100_480 + # Expected to be reached around Saturday, 2025-08-09 09:31:28 GMT + # Right now the best block is 5_092_661 at Wednesday, 2025-08-06 16:21:58 GMT + start_height: 5_100_480 + timeout_height: 5_221_440 # N + 6 * 20160 (6 weeks after the start) + minimum_activation_height: 0 + lock_in_on_timeout: false + version: 0.64.0 + signal_support_by_default: true + +ENABLE_NANO_CONTRACTS: feature_activation +NC_ON_CHAIN_BLUEPRINT_ALLOWED_ADDRESSES: + - WWFiNeWAFSmgtjm4ht2MydwS5GY3kMJsEK + - WQFDxic8xWWnMLL4aE5abY2XRKPNvGhtjY diff --git a/hathor/conf/unittests.py b/hathor/conf/unittests.py deleted file mode 100644 index afd06e266..000000000 --- a/hathor/conf/unittests.py +++ /dev/null @@ -1,44 +0,0 @@ -# Copyright 2021 Hathor Labs -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from hathor.conf.settings import HathorSettings -from hathor.feature_activation.settings import Settings as FeatureActivationSettings - -SETTINGS = HathorSettings( - P2PKH_VERSION_BYTE=b'\x28', - MULTISIG_VERSION_BYTE=b'\x64', - NETWORK_NAME='unittests', - BLOCKS_PER_HALVING=2 * 60, - MIN_BLOCK_WEIGHT=2, - MIN_TX_WEIGHT=2, - MIN_SHARE_WEIGHT=2, - MAX_TX_WEIGHT_DIFF=25.0, - BLOCK_DIFFICULTY_N_BLOCKS=20, - GENESIS_OUTPUT_SCRIPT=bytes.fromhex('76a914d07bc82d6e0d1bb116614076645e9b87c8c83b4188ac'), - GENESIS_BLOCK_NONCE=5, - GENESIS_BLOCK_HASH=bytes.fromhex('2ebb3b8edcb72a7e46cc0efacfe1b109e2e9dd868a90fe0906968dc8fbbf6488'), - GENESIS_TX1_NONCE=6, - GENESIS_TX1_HASH=bytes.fromhex('16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952'), - GENESIS_TX2_NONCE=2, - GENESIS_TX2_HASH=bytes.fromhex('33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'), - REWARD_SPEND_MIN_BLOCKS=10, - SLOW_ASSERTS=True, - MAX_TX_WEIGHT_DIFF_ACTIVATION=0.0, - FEATURE_ACTIVATION=FeatureActivationSettings( - evaluation_interval=4, - max_signal_bits=4, - default_threshold=3 - ), - ENABLE_NANO_CONTRACTS=True, -) diff --git a/hathor/conf/unittests.yml b/hathor/conf/unittests.yml index fdcc5e261..ebc6a3e1b 100644 --- a/hathor/conf/unittests.yml +++ b/hathor/conf/unittests.yml @@ -17,9 +17,22 @@ GENESIS_TX2_HASH: 33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e8 REWARD_SPEND_MIN_BLOCKS: 10 SLOW_ASSERTS: true MAX_TX_WEIGHT_DIFF_ACTIVATION: 0.0 -ENABLE_NANO_CONTRACTS: true FEATURE_ACTIVATION: evaluation_interval: 4 max_signal_bits: 4 default_threshold: 3 + 
+ENABLE_NANO_CONTRACTS: enabled + +NC_ON_CHAIN_BLUEPRINT_ALLOWED_ADDRESSES: + # keypair wallet: + # - privkey: + # MIH0MF8GCSqGSIb3DQEFDTBSMDEGCSqGSIb3DQEFDDAkBBCIdovnmKjK3KUc61YGgja0AgIIAD + # AMBggqhkiG9w0CCQUAMB0GCWCGSAFlAwQBKgQQl2CJT4I2IUzRNoU9hyOWEwSBkLznN9Nunel+ + # kK0FXpk//z0ZAnIyVacfHklCxFGyOj1VSjor0CHzH2Gmblvr+m7lCmRmqSVAwJpplqQYdBUF6s + # R9djHLY6svPY0o//dqQ/xM7QiY2FHlb3JQCTu7DaMflqPcJXlRXAFyoACnmj4/lUJWgrcWalar + # CSI+8rIillg3AU8/2gfoB1BxulVIIG35SQ== + # - password: + # OCBtestPW + - HFwHrQHUftQ7obLj7xbQjG4ZEwvyVXeyoE diff --git a/hathor/consensus/block_consensus.py b/hathor/consensus/block_consensus.py index d77dee210..1ed212435 100644 --- a/hathor/consensus/block_consensus.py +++ b/hathor/consensus/block_consensus.py @@ -12,18 +12,28 @@ # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations + +import hashlib +import traceback from itertools import chain from typing import TYPE_CHECKING, Any, Iterable, Optional, cast from structlog import get_logger +from typing_extensions import assert_never -from hathor.conf.get_settings import get_global_settings from hathor.transaction import BaseTransaction, Block, Transaction +from hathor.transaction.nc_execution_state import NCExecutionState +from hathor.transaction.types import MetaNCCallRecord from hathor.util import classproperty from hathor.utils.weight import weight_to_work if TYPE_CHECKING: + from hathor.conf.settings import HathorSettings from hathor.consensus.context import ConsensusAlgorithmContext + from hathor.nanocontracts.nc_exec_logs import NCLogStorage + from hathor.nanocontracts.runner import Runner + from hathor.nanocontracts.runner.runner import RunnerFactory logger = get_logger() @@ -33,9 +43,17 @@ class BlockConsensusAlgorithm: """Implement the consensus algorithm for blocks.""" - def __init__(self, context: 'ConsensusAlgorithmContext') -> None: - self._settings = get_global_settings() + def __init__( + self, 
+ settings: HathorSettings, + context: 'ConsensusAlgorithmContext', + runner_factory: RunnerFactory, + nc_log_storage: NCLogStorage, + ) -> None: + self._settings = settings self.context = context + self._runner_factory = runner_factory + self._nc_log_storage = nc_log_storage @classproperty def log(cls) -> Any: @@ -46,7 +64,230 @@ def log(cls) -> Any: return _base_transaction_log def update_consensus(self, block: Block) -> None: + assert self.context.nc_events is None + self.context.nc_events = [] self.update_voided_info(block) + if self._settings.ENABLE_NANO_CONTRACTS: + self.execute_nano_contracts(block) + + def _nc_initialize_genesis(self, block: Block) -> None: + """Initialize the genesis block with an empty contract trie.""" + assert block.is_genesis + meta = block.get_metadata() + block_storage = self.context.consensus.nc_storage_factory.get_empty_block_storage() + block_storage.commit() + if meta.nc_block_root_id is not None: + assert meta.nc_block_root_id == block_storage.get_root_id() + else: + meta.nc_block_root_id = block_storage.get_root_id() + self.context.save(block) + + def execute_nano_contracts(self, block: Block) -> None: + """Execute the method calls for transactions confirmed by this block handling reorgs.""" + # If we reach this point, Nano Contracts must be enabled. + assert self._settings.ENABLE_NANO_CONTRACTS + + meta = block.get_metadata() + + if block.is_genesis: + self._nc_initialize_genesis(block) + return + + if meta.voided_by: + # If the block is voided, skip execution. + return + + assert meta.nc_block_root_id is None + + to_be_executed: list[Block] = [] + is_reorg: bool = False + if self.context.reorg_common_block: + # handle reorgs + is_reorg = True + cur = block + # XXX We could stop when `cur_meta.nc_block_root_id is not None` but + # first we need to refactor meta.first_block and meta.voided_by to + # have different values per block. 
+ while cur != self.context.reorg_common_block: + cur_meta = cur.get_metadata() + if cur_meta.nc_block_root_id is not None: + # Reset nc_block_root_id to force re-execution. + cur_meta.nc_block_root_id = None + to_be_executed.append(cur) + cur = cur.get_block_parent() + else: + # no reorg occurred, so simply execute this new block. + to_be_executed = [block] + + for current in to_be_executed[::-1]: + self._nc_execute_calls(current, is_reorg=is_reorg) + + def _nc_execute_calls(self, block: Block, *, is_reorg: bool) -> None: + """Internal method to execute the method calls for transactions confirmed by this block. + """ + from hathor.nanocontracts import NC_EXECUTION_FAIL_ID, NCFail + from hathor.nanocontracts.types import Address + + assert self._settings.ENABLE_NANO_CONTRACTS + + meta = block.get_metadata() + assert not meta.voided_by + assert meta.nc_block_root_id is None + + parent = block.get_block_parent() + if parent.is_genesis: + # XXX We can remove this call after the full node initialization is refactored and + # the genesis block goes through the consensus protocol. + self._nc_initialize_genesis(parent) + parent_meta = parent.get_metadata() + block_root_id = parent_meta.nc_block_root_id + assert block_root_id is not None + + nc_calls: list[Transaction] = [] + for tx in block.iter_transactions_in_this_block(): + if not tx.is_nano_contract(): + # Skip other types of transactions. + continue + tx_meta = tx.get_metadata() + if is_reorg: + assert self.context.reorg_common_block is not None + # Clear the NC_EXECUTION_FAIL_ID flag if this is the only reason the transaction was voided. + # This case might only happen when handling reorgs. 
+ assert tx.storage is not None + if tx_meta.voided_by == {tx.hash, NC_EXECUTION_FAIL_ID}: + if tx_meta.conflict_with: + for tx_conflict_id in tx_meta.conflict_with: + tx_conflict = tx.storage.get_transaction(tx_conflict_id) + tx_conflict_meta = tx_conflict.get_metadata() + assert tx_conflict_meta.first_block is None + assert tx_conflict_meta.voided_by + self.context.transaction_algorithm.remove_voided_by(tx, tx.hash) + tx_meta.voided_by = None + self.context.save(tx) + tx_meta.nc_execution = NCExecutionState.PENDING + nc_calls.append(tx) + + if not nc_calls: + meta.nc_block_root_id = block_root_id + self.context.save(block) + return + + nc_sorted_calls = self.context.consensus.nc_calls_sorter(block, nc_calls) + block_storage = self.context.consensus.nc_storage_factory.get_block_storage(block_root_id) + seed_hasher = hashlib.sha256(block.hash) + + for tx in nc_sorted_calls: + seed_hasher.update(tx.hash) + seed_hasher.update(block_storage.get_root_id()) + + tx_meta = tx.get_metadata() + if tx_meta.voided_by: + # Skip voided transactions. This might happen if a previous tx in nc_calls fails and + # marks this tx as voided. + tx_meta.nc_execution = NCExecutionState.SKIPPED + self.context.save(tx) + # Update seqnum even for skipped nano transactions. 
+ nc_header = tx.get_nano_header() + seqnum = block_storage.get_address_seqnum(Address(nc_header.nc_address)) + if nc_header.nc_seqnum > seqnum: + block_storage.set_address_seqnum(Address(nc_header.nc_address), nc_header.nc_seqnum) + continue + + runner = self._runner_factory.create(block_storage=block_storage, seed=seed_hasher.digest()) + exception_and_tb: tuple[NCFail, str] | None = None + try: + runner.execute_from_tx(tx) + except NCFail as e: + kwargs: dict[str, Any] = {} + if tx.name: + kwargs['__name'] = tx.name + self.log.info( + 'nc execution failed', + tx=tx.hash.hex(), + error=repr(e), + cause=repr(e.__cause__), + **kwargs, + ) + exception_and_tb = e, traceback.format_exc() + self.mark_as_nc_fail_execution(tx) + else: + tx_meta.nc_execution = NCExecutionState.SUCCESS + self.context.save(tx) + # TODO Avoid calling multiple commits for the same contract. The best would be to call the commit + # method once per contract per block, just like we do for the block_storage. This ensures we will + # have a clean database with no orphan nodes. + runner.commit() + + # Update metadata. + self.nc_update_metadata(tx, runner) + + # Update indexes. This must be after metadata is updated. + assert tx.storage is not None + assert tx.storage.indexes is not None + tx.storage.indexes.handle_contract_execution(tx) + + # We only emit events when the nc is successfully executed. + assert self.context.nc_events is not None + last_call_info = runner.get_last_call_info() + self.context.nc_events.append((tx, last_call_info.nc_logger.__events__)) + finally: + # We save logs regardless of whether the nc successfully executed. + self._nc_log_storage.save_logs(tx, runner.get_last_call_info(), exception_and_tb) + + # Save block state root id. If nothing happens, it should be the same as its block parent. 
+ block_storage.commit() + assert block_storage.get_root_id() is not None + meta.nc_block_root_id = block_storage.get_root_id() + self.context.save(block) + + for tx in nc_calls: + tx_meta = tx.get_metadata() + assert tx_meta.nc_execution is not None + self.log.info('nano tx execution status', + blk=block.hash.hex(), + tx=tx.hash.hex(), + execution=tx_meta.nc_execution.value) + match tx_meta.nc_execution: + case NCExecutionState.PENDING: + assert False # should never happen + case NCExecutionState.SUCCESS: + assert tx_meta.voided_by is None + case NCExecutionState.FAILURE: + assert tx_meta.voided_by == {tx.hash, NC_EXECUTION_FAIL_ID} + case NCExecutionState.SKIPPED: + assert tx_meta.voided_by + assert NC_EXECUTION_FAIL_ID not in tx_meta.voided_by + case _: + assert_never(tx_meta.nc_execution) + + def nc_update_metadata(self, tx: Transaction, runner: 'Runner') -> None: + from hathor.nanocontracts.runner.types import CallType + + meta = tx.get_metadata() + assert meta.nc_execution is NCExecutionState.SUCCESS + call_info = runner.get_last_call_info() + assert call_info.calls is not None + nc_calls = [ + MetaNCCallRecord.from_call_record(call) + for call in call_info.calls if call.type == CallType.PUBLIC + ] + + # Update metadata. + assert meta.nc_calls is None + meta.nc_calls = nc_calls + self.context.save(tx) + + def mark_as_nc_fail_execution(self, tx: Transaction) -> None: + """Mark that a transaction failed execution. It also propagates its voidedness through the DAG of funds.""" + from hathor.nanocontracts import NC_EXECUTION_FAIL_ID + assert tx.storage is not None + tx_meta = tx.get_metadata() + tx_meta.add_voided_by(NC_EXECUTION_FAIL_ID) + tx_meta.nc_execution = NCExecutionState.FAILURE + self.context.save(tx) + self.context.transaction_algorithm.add_voided_by(tx, + tx.hash, + is_dag_verifications=False) def update_voided_info(self, block: Block) -> None: """ This method is called only once when a new block arrives. 
@@ -227,6 +468,7 @@ def union_voided_by_from_parents(self, block: Block) -> set[bytes]: are not propagated through the chains. For further information, see the docstring of the ConsensusAlgorithm class. """ + from hathor.nanocontracts import NC_EXECUTION_FAIL_ID voided_by: set[bytes] = set() for parent in block.get_parents(): parent_meta = parent.get_metadata() @@ -240,7 +482,8 @@ def union_voided_by_from_parents(self, block: Block) -> set[bytes]: # the blocks themselves. voided_by2 = voided_by2.copy() voided_by2.discard(parent.hash) - voided_by.update(self.context.consensus.filter_out_soft_voided_entries(parent, voided_by2)) + voided_by.update(self.context.consensus.filter_out_voided_by_entries_from_parents(parent, voided_by2)) + voided_by.discard(NC_EXECUTION_FAIL_ID) return voided_by def update_voided_by_from_parents(self, block: Block) -> bool: @@ -443,6 +686,13 @@ def remove_first_block_markers(self, block: Block) -> None: bfs.skip_neighbors(tx) continue + if tx.is_nano_contract(): + if meta.nc_execution is NCExecutionState.SUCCESS: + assert tx.storage is not None + assert tx.storage.indexes is not None + tx.storage.indexes.handle_contract_unexecution(tx) + meta.nc_execution = NCExecutionState.PENDING + meta.nc_calls = None meta.first_block = None self.context.save(tx) @@ -532,5 +782,17 @@ def calculate_score(self, block: Block, *, mark_as_best_chain: bool = False) -> class BlockConsensusAlgorithmFactory: + __slots__ = ('settings', 'nc_log_storage', '_runner_factory') + + def __init__( + self, + settings: HathorSettings, + runner_factory: RunnerFactory, + nc_log_storage: NCLogStorage, + ) -> None: + self.settings = settings + self._runner_factory = runner_factory + self.nc_log_storage = nc_log_storage + def __call__(self, context: 'ConsensusAlgorithmContext') -> BlockConsensusAlgorithm: - return BlockConsensusAlgorithm(context) + return BlockConsensusAlgorithm(self.settings, context, self._runner_factory, self.nc_log_storage) diff --git 
a/hathor/consensus/consensus.py b/hathor/consensus/consensus.py index 4691b52b8..3d8b2a006 100644 --- a/hathor/consensus/consensus.py +++ b/hathor/consensus/consensus.py @@ -19,7 +19,6 @@ from structlog import get_logger -from hathor.conf.get_settings import get_global_settings from hathor.consensus.block_consensus import BlockConsensusAlgorithmFactory from hathor.consensus.context import ConsensusAlgorithmContext from hathor.consensus.transaction_consensus import TransactionConsensusAlgorithmFactory @@ -29,6 +28,11 @@ from hathor.util import not_none if TYPE_CHECKING: + from hathor.conf.settings import HathorSettings + from hathor.nanocontracts import NCStorageFactory + from hathor.nanocontracts.nc_exec_logs import NCLogStorage + from hathor.nanocontracts.runner.runner import RunnerFactory + from hathor.nanocontracts.sorter.types import NCSorterCallable from hathor.transaction.storage import TransactionStorage logger = get_logger() @@ -65,15 +69,23 @@ class ConsensusAlgorithm: def __init__( self, + nc_storage_factory: 'NCStorageFactory', soft_voided_tx_ids: set[bytes], pubsub: PubSubManager, + *, + settings: HathorSettings, + runner_factory: RunnerFactory, + nc_calls_sorter: NCSorterCallable, + nc_log_storage: NCLogStorage, ) -> None: - self._settings = get_global_settings() + self._settings = settings self.log = logger.new() self._pubsub = pubsub + self.nc_storage_factory = nc_storage_factory self.soft_voided_tx_ids = frozenset(soft_voided_tx_ids) - self.block_algorithm_factory = BlockConsensusAlgorithmFactory() + self.block_algorithm_factory = BlockConsensusAlgorithmFactory(settings, runner_factory, nc_log_storage) self.transaction_algorithm_factory = TransactionConsensusAlgorithmFactory() + self.nc_calls_sorter = nc_calls_sorter def create_context(self) -> ConsensusAlgorithmContext: """Handy method to create a context that can be used to access block and transaction algorithms.""" @@ -113,21 +125,42 @@ def unsafe_update(self, base: BaseTransaction) -> None: 
else: raise NotImplementedError - new_best_height, new_best_tip = storage.indexes.height.get_height_tip() txs_to_remove: list[BaseTransaction] = [] + + from hathor.nanocontracts.exception import NanoContractDoesNotExist + for tx_affected in context.txs_affected: + if not tx_affected.is_nano_contract(): + # Not a nano tx? Skip! + continue + if tx_affected.get_metadata().first_block: + # Not in mempool? Skip! + continue + assert isinstance(tx_affected, Transaction) + nano_header = tx_affected.get_nano_header() + try: + # TODO: We use this call to check whether the contract ID still exists after the reorg, as it may + # have been a contract created by another contract that became "unexecuted" after the reorg. We + # could use a more explicit check here instead of relying on this method. + nano_header.get_blueprint_id() + except NanoContractDoesNotExist: + from hathor.transaction.validation_state import ValidationState + tx_affected.set_validation(ValidationState.INVALID) + + new_best_height, new_best_tip = storage.indexes.height.get_height_tip() if new_best_height < best_height: self.log.warn('height decreased, re-checking mempool', prev_height=best_height, new_height=new_best_height, prev_block_tip=best_tip.hex(), new_block_tip=new_best_tip.hex()) # XXX: this method will mark as INVALID all transactions in the mempool that became invalid because of a # reward lock - txs_to_remove = storage.compute_transactions_that_became_invalid(new_best_height) - if txs_to_remove: - self.log.warn('some transactions on the mempool became invalid and will be removed', - count=len(txs_to_remove)) - # XXX: because transactions in `txs_to_remove` are marked as invalid, we need this context to be - # able to remove them - with storage.allow_invalid_context(): - self._remove_transactions(txs_to_remove, storage, context) + txs_to_remove.extend(storage.compute_transactions_that_became_invalid(new_best_height)) + + if txs_to_remove: + self.log.warn('some transactions on the mempool became 
invalid and will be removed', + count=len(txs_to_remove)) + # XXX: because transactions in `txs_to_remove` are marked as invalid, we need this context to be + # able to remove them + with storage.allow_invalid_context(): + self._remove_transactions(txs_to_remove, storage, context) # emit the reorg started event if needed if context.reorg_common_block is not None: @@ -149,6 +182,16 @@ def unsafe_update(self, base: BaseTransaction) -> None: tx_affected.storage.indexes.update(tx_affected) context.pubsub.publish(HathorEvents.CONSENSUS_TX_UPDATE, tx=tx_affected) + # handle custom NC events + if isinstance(base, Block): + assert context.nc_events is not None + for tx, events in context.nc_events: + assert tx.is_nano_contract() + for event in events: + context.pubsub.publish(HathorEvents.NC_EVENT, tx=tx, nc_event=event) + else: + assert context.nc_events is None + # And emit events for txs that were removed for tx_removed in txs_to_remove: context.pubsub.publish(HathorEvents.CONSENSUS_TX_REMOVED, tx=tx_removed) @@ -157,7 +200,16 @@ def unsafe_update(self, base: BaseTransaction) -> None: if context.reorg_common_block is not None: context.pubsub.publish(HathorEvents.REORG_FINISHED) - def filter_out_soft_voided_entries(self, tx: BaseTransaction, voided_by: set[bytes]) -> set[bytes]: + def filter_out_voided_by_entries_from_parents(self, tx: BaseTransaction, voided_by: set[bytes]) -> set[bytes]: + """Filter out voided_by entries that should be inherited from parents.""" + voided_by = set(voided_by) + voided_by = self._filter_out_nc_fail_entries(tx, voided_by) + voided_by = self._filter_out_soft_voided_entries(tx, voided_by) + return voided_by + + def _filter_out_soft_voided_entries(self, tx: BaseTransaction, voided_by: set[bytes]) -> set[bytes]: + """Remove voided_by entries of soft voided transactions.""" + from hathor.nanocontracts import NC_EXECUTION_FAIL_ID if not (self.soft_voided_tx_ids & voided_by): return voided_by ret = set() @@ -166,6 +218,8 @@ def 
filter_out_soft_voided_entries(self, tx: BaseTransaction, voided_by: set[byt continue if h == self._settings.CONSENSUS_FAIL_ID: continue + if h == NC_EXECUTION_FAIL_ID: + continue if h == tx.hash: continue if h in self.soft_voided_tx_ids: @@ -178,6 +232,32 @@ def filter_out_soft_voided_entries(self, tx: BaseTransaction, voided_by: set[byt ret.add(h) return ret + def _filter_out_nc_fail_entries(self, tx: BaseTransaction, voided_by: set[bytes]) -> set[bytes]: + """Remove NC_EXECUTION_FAIL_ID flag from voided_by inherited by parents.""" + from hathor.nanocontracts import NC_EXECUTION_FAIL_ID + ret = set(voided_by) + if NC_EXECUTION_FAIL_ID in ret: + # If NC_EXECUTION_FAIL_ID is in voided_by, then tx.hash must be in voided_by too. + # So we remove both of them. + ret.remove(NC_EXECUTION_FAIL_ID) + ret.remove(tx.hash) + # Then we remove all hashes from transactions that also have the NC_EXECUTION_FAIL_ID flag. + for h in voided_by: + if h == self._settings.SOFT_VOIDED_ID: + continue + if h == NC_EXECUTION_FAIL_ID: + continue + if h == tx.hash: + continue + assert tx.storage is not None + tx2 = tx.storage.get_transaction(h) + tx2_meta = tx2.get_metadata() + tx2_voided_by: set[bytes] = tx2_meta.voided_by or set() + if NC_EXECUTION_FAIL_ID in tx2_voided_by: + ret.discard(h) + assert NC_EXECUTION_FAIL_ID not in ret + return ret + def _remove_transactions( self, txs: list[BaseTransaction], diff --git a/hathor/consensus/consensus_settings.py b/hathor/consensus/consensus_settings.py index 259b35f99..f07cb514d 100644 --- a/hathor/consensus/consensus_settings.py +++ b/hathor/consensus/consensus_settings.py @@ -12,10 +12,12 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from __future__ import annotations + import hashlib from abc import ABC, abstractmethod from enum import Enum, unique -from typing import Annotated, Any, Literal, TypeAlias +from typing import TYPE_CHECKING, Annotated, Any, Literal, TypeAlias from pydantic import Field, NonNegativeInt, PrivateAttr, validator from typing_extensions import override @@ -24,6 +26,9 @@ from hathor.util import json_dumpb from hathor.utils.pydantic import BaseModel +if TYPE_CHECKING: + from hathor.conf.settings import HathorSettings + @unique class ConsensusType(str, Enum): @@ -44,13 +49,19 @@ def is_poa(self) -> bool: return self.type is ConsensusType.PROOF_OF_AUTHORITY @abstractmethod - def _get_valid_vertex_versions(self, include_genesis: bool) -> set[TxVersion]: + def _get_valid_vertex_versions(self, include_genesis: bool, *, settings: HathorSettings) -> set[TxVersion]: """Return a set of `TxVersion`s that are valid in for this consensus type.""" raise NotImplementedError - def is_vertex_version_valid(self, version: TxVersion, include_genesis: bool = False) -> bool: + def is_vertex_version_valid( + self, + version: TxVersion, + *, + settings: HathorSettings, + include_genesis: bool = False, + ) -> bool: """Return whether a `TxVersion` is valid for this consensus type.""" - return version in self._get_valid_vertex_versions(include_genesis) + return version in self._get_valid_vertex_versions(include_genesis, settings=settings) def get_peer_hello_hash(self) -> str | None: """Return a hash of consensus settings to be used in peer hello validation.""" @@ -67,14 +78,19 @@ class PowSettings(_BaseConsensusSettings): type: Literal[ConsensusType.PROOF_OF_WORK] = ConsensusType.PROOF_OF_WORK @override - def _get_valid_vertex_versions(self, include_genesis: bool) -> set[TxVersion]: - return { + def _get_valid_vertex_versions(self, include_genesis: bool, *, settings: HathorSettings) -> set[TxVersion]: + versions = { TxVersion.REGULAR_BLOCK, TxVersion.REGULAR_TRANSACTION, 
TxVersion.TOKEN_CREATION_TRANSACTION, - TxVersion.MERGE_MINED_BLOCK + TxVersion.MERGE_MINED_BLOCK, } + if settings.ENABLE_NANO_CONTRACTS: + versions.add(TxVersion.ON_CHAIN_BLUEPRINT) + + return versions + @override def get_peer_hello_hash(self) -> str | None: return None @@ -124,7 +140,7 @@ def _validate_signers(cls, signers: tuple[PoaSignerSettings, ...]) -> tuple[PoaS return signers @override - def _get_valid_vertex_versions(self, include_genesis: bool) -> set[TxVersion]: + def _get_valid_vertex_versions(self, include_genesis: bool, *, settings: HathorSettings) -> set[TxVersion]: versions = { TxVersion.POA_BLOCK, TxVersion.REGULAR_TRANSACTION, @@ -136,6 +152,9 @@ def _get_valid_vertex_versions(self, include_genesis: bool) -> set[TxVersion]: # This may be removed if we refactor the way genesis is constructed. versions.add(TxVersion.REGULAR_BLOCK) + if settings.ENABLE_NANO_CONTRACTS: + versions.add(TxVersion.ON_CHAIN_BLUEPRINT) + return versions @override diff --git a/hathor/consensus/context.py b/hathor/consensus/context.py index a83af60b2..1e14c7540 100644 --- a/hathor/consensus/context.py +++ b/hathor/consensus/context.py @@ -12,17 +12,20 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from __future__ import annotations + from typing import TYPE_CHECKING, Optional from structlog import get_logger from hathor.pubsub import PubSubManager -from hathor.transaction import BaseTransaction, Block +from hathor.transaction import BaseTransaction, Block, Transaction if TYPE_CHECKING: from hathor.consensus.block_consensus import BlockConsensusAlgorithm from hathor.consensus.consensus import ConsensusAlgorithm from hathor.consensus.transaction_consensus import TransactionConsensusAlgorithm + from hathor.nanocontracts.nc_exec_logs import NCEvent logger = get_logger() @@ -39,6 +42,7 @@ class ConsensusAlgorithmContext: transaction_algorithm: 'TransactionConsensusAlgorithm' txs_affected: set[BaseTransaction] reorg_common_block: Optional[Block] + nc_events: list[tuple[Transaction, list[NCEvent]]] | None def __init__(self, consensus: 'ConsensusAlgorithm', pubsub: PubSubManager) -> None: self.consensus = consensus @@ -47,6 +51,7 @@ def __init__(self, consensus: 'ConsensusAlgorithm', pubsub: PubSubManager) -> No self.transaction_algorithm = self.consensus.transaction_algorithm_factory(self) self.txs_affected = set() self.reorg_common_block = None + self.nc_events = None def save(self, tx: BaseTransaction) -> None: """Only metadata is ever saved in a consensus update.""" diff --git a/hathor/consensus/poa/poa_block_producer.py b/hathor/consensus/poa/poa_block_producer.py index a11758246..60f7c9269 100644 --- a/hathor/consensus/poa/poa_block_producer.py +++ b/hathor/consensus/poa/poa_block_producer.py @@ -193,7 +193,7 @@ def _produce_block(self, previous_block: PoaBlock) -> None: parent=block.get_block_parent_hash().hex(), voided=bool(block.get_metadata().voided_by), ) - self.manager.on_new_tx(block, propagate_to_peers=True, fails_silently=False) + self.manager.on_new_tx(block, propagate_to_peers=True) def _expected_block_timestamp(self, previous_block: Block, signer_index: int) -> int: """Calculate the expected timestamp for a new block.""" diff --git 
a/hathor/consensus/transaction_consensus.py b/hathor/consensus/transaction_consensus.py index 12d55b270..dc4f868fe 100644 --- a/hathor/consensus/transaction_consensus.py +++ b/hathor/consensus/transaction_consensus.py @@ -18,6 +18,7 @@ from hathor.conf.get_settings import get_global_settings from hathor.transaction import BaseTransaction, Block, Transaction, TxInput +from hathor.types import VertexId from hathor.util import classproperty from hathor.utils.weight import weight_to_work @@ -48,6 +49,15 @@ def update_consensus(self, tx: Transaction) -> None: self.mark_inputs_as_used(tx) self.update_voided_info(tx) self.set_conflict_twins(tx) + self.execute_nano_contracts(tx) + + def execute_nano_contracts(self, tx: Transaction) -> None: + """This method is called when the transaction is added to the mempool. + + The method is currently only executed when the transaction is confirmed by a block. + Hence, we do nothing here. + """ + pass def mark_inputs_as_used(self, tx: Transaction) -> None: """ Mark all its inputs as used @@ -166,6 +176,7 @@ def check_twins(self, tx: Transaction, transactions: Iterable[BaseTransaction]) def update_voided_info(self, tx: Transaction) -> None: """ This method should be called only once when the transactions is added to the DAG. 
""" + from hathor.nanocontracts import NC_EXECUTION_FAIL_ID assert tx.storage is not None voided_by: set[bytes] = set() @@ -174,8 +185,11 @@ def update_voided_info(self, tx: Transaction) -> None: for parent in tx.get_parents(): parent_meta = parent.get_metadata() if parent_meta.voided_by: - voided_by.update(self.context.consensus.filter_out_soft_voided_entries(parent, parent_meta.voided_by)) + voided_by.update( + self.context.consensus.filter_out_voided_by_entries_from_parents(parent, parent_meta.voided_by) + ) assert self._settings.SOFT_VOIDED_ID not in voided_by + assert NC_EXECUTION_FAIL_ID not in voided_by assert not (self.context.consensus.soft_voided_tx_ids & voided_by) # Union of voided_by of inputs @@ -185,7 +199,9 @@ def update_voided_info(self, tx: Transaction) -> None: if spent_meta.voided_by: voided_by.update(spent_meta.voided_by) voided_by.discard(self._settings.SOFT_VOIDED_ID) + voided_by.discard(NC_EXECUTION_FAIL_ID) assert self._settings.SOFT_VOIDED_ID not in voided_by + assert NC_EXECUTION_FAIL_ID not in voided_by # Update accumulated weight of the transactions voiding us. assert tx.hash not in voided_by @@ -232,8 +248,8 @@ def update_voided_info(self, tx: Transaction) -> None: if conflict_tx_meta.voided_by: if conflict_tx_meta.first_block is not None: # do nothing - assert bool(self.context.consensus.soft_voided_tx_ids & conflict_tx_meta.voided_by) - self.log.info('skipping soft voided conflict', conflict_tx=conflict_tx.hash_hex) + self.assert_voided_with_first_block(conflict_tx) + self.log.info('skipping voided conflict with first block', conflict_tx=conflict_tx.hash_hex) else: self.mark_as_voided(conflict_tx) @@ -245,6 +261,30 @@ def update_voided_info(self, tx: Transaction) -> None: # Assert the final state is valid. 
self.assert_valid_consensus(tx) + def assert_voided_with_first_block(self, tx: BaseTransaction) -> None: + """Assert the voided transaction with first block is valid.""" + from hathor.nanocontracts import NC_EXECUTION_FAIL_ID + assert tx.storage is not None + + meta = tx.get_metadata() + assert meta.voided_by is not None + if bool(self.context.consensus.soft_voided_tx_ids & meta.voided_by): + # Soft voided txs can be confirmed by blocks. + return + if NC_EXECUTION_FAIL_ID in meta.voided_by: + # Nano transactions that failed execution can be confirmed by blocks. + assert tx.is_nano_contract() + return + for h in meta.voided_by: + # Transactions voided by Nano transactions that failed execution can be confirmed by blocks. + tx2 = cast(Transaction, tx.storage.get_transaction(h)) + tx2_meta = tx2.get_metadata() + assert tx2_meta.voided_by + if NC_EXECUTION_FAIL_ID in tx2_meta.voided_by: + assert tx2.is_nano_contract() + return + raise AssertionError + def assert_valid_consensus(self, tx: BaseTransaction) -> None: """Assert the conflict resolution is valid.""" meta = tx.get_metadata() @@ -382,7 +422,39 @@ def mark_as_voided(self, tx: Transaction) -> None: self.add_voided_by(tx, tx.hash) self.assert_valid_consensus(tx) - def add_voided_by(self, tx: Transaction, voided_hash: bytes) -> bool: + def has_only_nc_execution_fail_id(self, tx: Transaction) -> bool: + """Return true if the only reason that tx is voided is because of nano execution failures.""" + from hathor.nanocontracts import NC_EXECUTION_FAIL_ID + meta = tx.get_metadata() + + if meta.voided_by is None: + return False + assert meta.voided_by + + if tx.hash in meta.voided_by: + if NC_EXECUTION_FAIL_ID not in meta.voided_by: + # If tx has a conflict, it is voiding itself but did not fail nano execution, + # then we can safely return False. 
+ return False + + for h in meta.voided_by: + if h == tx.hash: + continue + if h == NC_EXECUTION_FAIL_ID: + continue + if h == self._settings.SOFT_VOIDED_ID: + return False + assert tx.storage is not None + tx2 = tx.storage.get_transaction(h) + tx2_meta = tx2.get_metadata() + tx2_voided_by: set[VertexId] = tx2_meta.voided_by or set() + if NC_EXECUTION_FAIL_ID not in tx2_voided_by: + return False + assert tx2_voided_by == {tx2.hash, NC_EXECUTION_FAIL_ID} + + return True + + def add_voided_by(self, tx: Transaction, voided_hash: bytes, *, is_dag_verifications: bool = True) -> bool: """ Add a hash from `meta.voided_by` and its descendants (both from verification DAG and funds tree). """ @@ -394,11 +466,17 @@ def add_voided_by(self, tx: Transaction, voided_hash: bytes) -> bool: self.log.debug('add_voided_by', tx=tx.hash_hex, voided_hash=voided_hash.hex()) - is_dag_verifications = True if meta.voided_by and bool(self.context.consensus.soft_voided_tx_ids & meta.voided_by): # If tx is soft voided, we can only walk through the DAG of funds. is_dag_verifications = False + if self.has_only_nc_execution_fail_id(tx): + # If a transaction is voided solely because other nano transactions have failed execution, + # we should restrict our traversal to the DAG of funds only. This is important because if + # a transaction has a conflict and loses during conflict resolution, it will add itself + # to meta.voided_by. + is_dag_verifications = False + from hathor.transaction.storage.traversal import BFSTimestampWalk bfs = BFSTimestampWalk(tx.storage, is_dag_funds=True, is_dag_verifications=is_dag_verifications, is_left_to_right=True) diff --git a/hathor/daa.py b/hathor/daa.py index d3ae33379..358c4aabf 100644 --- a/hathor/daa.py +++ b/hathor/daa.py @@ -19,18 +19,20 @@ NOTE: This module could use a better name. 
""" +from __future__ import annotations + from enum import IntFlag from math import log from typing import TYPE_CHECKING, Callable, ClassVar, Optional from structlog import get_logger -from hathor.conf.settings import HathorSettings from hathor.profiler import get_cpu_profiler from hathor.types import VertexId from hathor.util import iwindows if TYPE_CHECKING: + from hathor.conf.settings import HathorSettings from hathor.transaction import Block, Transaction logger = get_logger() diff --git a/hathor/dag_builder/artifacts.py b/hathor/dag_builder/artifacts.py index 8137951ca..b0a4ae0fe 100644 --- a/hathor/dag_builder/artifacts.py +++ b/hathor/dag_builder/artifacts.py @@ -14,13 +14,16 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Iterator, NamedTuple +from typing import TYPE_CHECKING, Iterator, NamedTuple, TypeVar from hathor.dag_builder.types import DAGNode +from hathor.manager import HathorManager if TYPE_CHECKING: from hathor.transaction import BaseTransaction +T = TypeVar('T', bound='BaseTransaction') + class _Pair(NamedTuple): node: DAGNode @@ -38,3 +41,40 @@ def __init__(self, items: Iterator[tuple[DAGNode, BaseTransaction]]) -> None: self.by_name[node.name] = p self.list: tuple[_Pair, ...] = tuple(v) + self._last_propagated: str | None = None + + def get_typed_vertex(self, name: str, type_: type[T]) -> T: + """Get a vertex by name, asserting it is of the provided type.""" + _, vertex = self.by_name[name] + assert isinstance(vertex, type_) + return vertex + + def get_typed_vertices(self, names: list[str], type_: type[T]) -> list[T]: + """Get a list of vertices by name, asserting they are of the provided type.""" + return [self.get_typed_vertex(name, type_) for name in names] + + def propagate_with(self, manager: HathorManager, *, up_to: str | None = None) -> None: + """ + Propagate vertices using the provided manager up to the provided node name, included. 
+ Last propagation is preserved in memory so you can make a sequence of propagate_with(). + """ + found_begin = self._last_propagated is None + found_end = False + + for node, vertex in self.list: + if found_begin: + try: + assert manager.on_new_tx(vertex) + except Exception as e: + raise Exception(f'failed on_new_tx({node.name})') from e + self._last_propagated = node.name + + if node.name == self._last_propagated: + found_begin = True + + if up_to and node.name == up_to: + found_end = True + break + + assert found_begin, f'node "{self._last_propagated}" not found' + assert up_to is None or found_end, f'node "{up_to}" not found' diff --git a/hathor/dag_builder/builder.py b/hathor/dag_builder/builder.py index e28a6fdfd..369999ed2 100644 --- a/hathor/dag_builder/builder.py +++ b/hathor/dag_builder/builder.py @@ -14,7 +14,9 @@ from __future__ import annotations +import ast from collections import defaultdict +from types import ModuleType from typing import Iterator from structlog import get_logger @@ -33,10 +35,17 @@ VertexResolverType, WalletFactoryType, ) +from hathor.dag_builder.utils import is_literal, parse_amount_token +from hathor.manager import HathorManager +from hathor.nanocontracts.catalog import NCBlueprintCatalog +from hathor.util import initialize_hd_wallet from hathor.wallet import BaseWallet logger = get_logger() +NC_DEPOSIT_KEY = 'nc_deposit' +NC_WITHDRAWAL_KEY = 'nc_withdrawal' + class DAGBuilder: def __init__( @@ -46,6 +55,8 @@ def __init__( genesis_wallet: BaseWallet, wallet_factory: WalletFactoryType, vertex_resolver: VertexResolverType, + nc_catalog: NCBlueprintCatalog, + blueprints_module: ModuleType | None = None, ) -> None: from hathor.dag_builder.default_filler import DefaultFiller from hathor.dag_builder.tokenizer import tokenize @@ -63,8 +74,32 @@ def __init__( genesis_wallet=genesis_wallet, wallet_factory=wallet_factory, vertex_resolver=vertex_resolver, + nc_catalog=nc_catalog, + blueprints_module=blueprints_module, + ) + + @staticmethod 
+ def from_manager( + manager: HathorManager, + genesis_words: str, + wallet_factory: WalletFactoryType, + blueprints_module: ModuleType | None = None + ) -> DAGBuilder: + """Create a DAGBuilder instance from a HathorManager instance.""" + assert manager.tx_storage.nc_catalog + return DAGBuilder( + settings=manager._settings, + daa=manager.daa, + genesis_wallet=initialize_hd_wallet(genesis_words), + wallet_factory=wallet_factory, + vertex_resolver=lambda x: manager.cpu_mining_service.resolve(x), + nc_catalog=manager.tx_storage.nc_catalog, + blueprints_module=blueprints_module, ) + def get_main_wallet(self) -> BaseWallet: + return self._exporter.get_wallet('main') + def parse_tokens(self, tokens: Iterator[Token]) -> None: """Parse tokens and update the DAG accordingly.""" for parts in tokens: @@ -115,6 +150,22 @@ def add_deps(self, _from: str, _to: str) -> Self: from_node.deps.add(_to) return self + def set_balance(self, name: str, token: str, value: int) -> Self: + """Set the expected balance for a given token, where balance = sum(outputs) - sum(inputs). + + =0 means sum(txouts) = sum(txins) + >0 means sum(txouts) > sum(txins), e.g., withdrawal + <0 means sum(txouts) < sum(txins), e.g., deposit + """ + node = self._get_or_create_node(name) + if token in node.balances: + raise SyntaxError(f'{name}: balance set more than once for {token}') + node.balances[token] = value + if token != 'HTR': + self._get_or_create_node(token, default_type=DAGNodeType.Token) + self.add_deps(name, token) + return self + def add_blockchain(self, prefix: str, first_parent: str | None, first_index: int, last_index: int) -> Self: """Add a sequence of nodes representing a chain of blocks.""" prev = first_parent @@ -127,7 +178,7 @@ def add_blockchain(self, prefix: str, first_parent: str | None, first_index: int return self def add_parent_edge(self, _from: str, _to: str) -> Self: - """Add a parent edge between two nodes. 
For clarity, `_to` has to be created befre `_from`.""" + """Add a parent edge between two nodes. For clarity, `_to` has to be created before `_from`.""" self._get_or_create_node(_to) from_node = self._get_or_create_node(_from) from_node.parents.add(_to) @@ -154,13 +205,85 @@ def set_output(self, name: str, index: int, amount: int, token: str, attrs: Attr node.deps.add(token) return self + def _parse_expression(self, value: str) -> ast.AST: + try: + ret = ast.parse(value, mode='eval').body + except SyntaxError as e: + raise SyntaxError(f'failed parsing "{value}"') from e + return ret + + def _add_nc_attribute(self, name: str, key: str, value: str) -> None: + """Handle attributes related to nanocontract transactions.""" + node = self._get_or_create_node(name) + if key == 'nc_id': + parsed_value = self._parse_expression(value) + if isinstance(parsed_value, ast.Name): + node.deps.add(parsed_value.id) + elif isinstance(parsed_value, ast.Call): + for arg in parsed_value.args: + if isinstance(arg, ast.Name): + node.deps.add(arg.id) + elif isinstance(arg, ast.Attribute): + assert isinstance(arg.value, ast.Name) + node.deps.add(arg.value.id) + node.attrs[key] = parsed_value + + elif key in (NC_DEPOSIT_KEY, NC_WITHDRAWAL_KEY): + token, amount, args = parse_amount_token(value) + if args: + raise SyntaxError(f'unexpected args in `{value}`') + if amount < 0: + raise SyntaxError(f'unexpected negative action in `{value}`') + multiplier = 1 if key == NC_WITHDRAWAL_KEY else -1 + self.set_balance(name, token, amount * multiplier) + actions = node.get_attr_list(key, default=[]) + actions.append((token, amount)) + node.attrs[key] = actions + + else: + node.attrs[key] = value + + def _add_ocb_attribute(self, name: str, key: str, value: str) -> None: + """Handle attributes related to on-chain blueprint transactions.""" + node = self._get_or_create_node(name) + node.type = DAGNodeType.OnChainBlueprint + if key == 'ocb_code': + node.attrs[key] = value + + elif key == 'ocb_private_key': + 
if not is_literal(value): + raise SyntaxError(f'ocb_private_key must be a bytes literal: {value}') + node.attrs[key] = value + + elif key == 'ocb_password': + if not is_literal(value): + raise SyntaxError(f'ocb_password must be a bytes literal: {value}') + node.attrs[key] = value + + else: + node.attrs[key] = value + def add_attribute(self, name: str, key: str, value: str) -> Self: """Add an attribute to a node.""" + if key.startswith('nc_'): + self._add_nc_attribute(name, key, value) + return self + + if key.startswith('ocb_'): + self._add_ocb_attribute(name, key, value) + return self + + if key.startswith('balance_'): + token = key[len('balance_'):] + self.set_balance(name, token, int(value)) + return self + node = self._get_or_create_node(name) - if key == 'type': - node.type = DAGNodeType(value) - else: + if key not in node.attrs: node.attrs[key] = value + else: + raise SyntaxError('attribute key duplicated') + return self def topological_sorting(self) -> Iterator[DAGNode]: @@ -181,12 +304,14 @@ def topological_sorting(self) -> Iterator[DAGNode]: for _ in range(len(self._nodes)): if len(candidates) == 0: - self.log('fail because there is at least one cycle in the dependencies', - direct_deps=direct_deps, - rev_deps=rev_deps, - seen=seen, - not_seen=set(self._nodes.keys()) - seen, - nodes=self._nodes) + self.log.error( + 'fail because there is at least one cycle in the dependencies', + direct_deps=direct_deps, + rev_deps=rev_deps, + seen=seen, + not_seen=set(self._nodes.keys()) - seen, + nodes=self._nodes, + ) raise RuntimeError('there is at least one cycle') name = candidates.pop() assert name not in seen diff --git a/hathor/dag_builder/cli.py b/hathor/dag_builder/cli.py index ff6184fb4..d8afd0fef 100644 --- a/hathor/dag_builder/cli.py +++ b/hathor/dag_builder/cli.py @@ -23,6 +23,7 @@ def main(filename: str, genesis_seed: str) -> None: from hathor.conf.get_settings import get_global_settings from hathor.daa import DifficultyAdjustmentAlgorithm + from 
hathor.nanocontracts.catalog import generate_catalog_from_settings from hathor.wallet import HDWallet settings = get_global_settings() @@ -36,6 +37,7 @@ def wallet_factory(words=None): genesis_wallet = wallet_factory(genesis_seed) daa = DifficultyAdjustmentAlgorithm(settings=settings) + nc_catalog = generate_catalog_from_settings(settings) builder = DAGBuilder( settings=settings, @@ -43,6 +45,7 @@ def wallet_factory(words=None): genesis_wallet=genesis_wallet, wallet_factory=wallet_factory, vertex_resolver=lambda x: None, + nc_catalog=nc_catalog, ) fp = open(filename, 'r') diff --git a/hathor/dag_builder/default_filler.py b/hathor/dag_builder/default_filler.py index 95026e2cc..9970b1bd1 100644 --- a/hathor/dag_builder/default_filler.py +++ b/hathor/dag_builder/default_filler.py @@ -15,11 +15,11 @@ from __future__ import annotations from collections import defaultdict -from math import ceil from hathor.conf.settings import HathorSettings from hathor.daa import DifficultyAdjustmentAlgorithm from hathor.dag_builder.builder import DAGBuilder, DAGInput, DAGNode, DAGNodeType, DAGOutput +from hathor.transaction.util import get_deposit_amount class DefaultFiller: @@ -64,7 +64,7 @@ def get_next_index(outputs: list[DAGOutput | None]) -> int: outputs.append(None) return len(outputs) - 1 - def fill_parents(self, node: DAGNode, *, target: int = 2, candidates: list[str] | None = []) -> None: + def fill_parents(self, node: DAGNode, *, target: int = 2, candidates: list[str] | None = None) -> None: """Fill parents of a vertex. Note: We shouldn't use the DAG transactions because it would confirm them, violating the DAG description.""" @@ -104,7 +104,10 @@ def find_txin(self, amount: int, token: str) -> DAGInput: return DAGInput(token, index) def calculate_balance(self, node: DAGNode) -> dict[str, int]: - """Calculate the balance for each token in a node.""" + """Calculate the balance for each token in a node. 
+ + balance = sum(outputs) - sum(inputs) + """ ins: defaultdict[str, int] = defaultdict(int) for tx_name, index in node.inputs: node2 = self._get_or_create_node(tx_name) @@ -117,7 +120,7 @@ def calculate_balance(self, node: DAGNode) -> dict[str, int]: assert txout is not None outs[txout.token] += txout.amount - keys = set(ins.keys()) | set(outs.keys()) + keys = set(ins.keys()) | set(outs.keys()) | set(node.balances.keys()) balance = {} for key in keys: balance[key] = outs.get(key, 0) - ins.get(key, 0) @@ -129,9 +132,8 @@ def balance_node_inputs_and_outputs(self, node: DAGNode) -> None: balance = self.calculate_balance(node) for key, diff in balance.items(): - # =0 balance - # <0 need output - # >0 need input + target = node.balances.get(key, 0) + diff -= target if diff < 0: index = self.get_next_index(node.outputs) node.outputs[index] = DAGOutput(abs(diff), key, {'_origin': 'f3'}) @@ -221,6 +223,10 @@ def run(self) -> None: self.fill_parents(node) self.balance_node_inputs_and_outputs(node) + case DAGNodeType.OnChainBlueprint: + self.fill_parents(node) + self.balance_node_inputs_and_outputs(node) + case DAGNodeType.Token: tokens.append(node.name) self.fill_parents(node) @@ -234,15 +240,22 @@ def run(self) -> None: balance = self.calculate_balance(node) assert set(balance.keys()).issubset({'HTR', token}) - htr_minimum = ceil(balance[token] / 100) - htr_balance = -balance.get('HTR', 0) + htr_deposit = get_deposit_amount(self._settings, balance[token]) + htr_balance = balance.get('HTR', 0) - if htr_balance > htr_minimum: + # target = sum(outputs) - sum(inputs) + # <0 means deposit + # >0 means withdrawal + htr_target = node.balances.get('HTR', 0) - htr_deposit + + diff = htr_balance - htr_target + + if diff < 0: index = self.get_next_index(node.outputs) - node.outputs[index] = DAGOutput(htr_balance - htr_minimum, 'HTR', {'_origin': 'f8'}) + node.outputs[index] = DAGOutput(-diff, 'HTR', {'_origin': 'f8'}) - elif htr_balance < htr_minimum: - txin = 
self.find_txin(htr_minimum - htr_balance, 'HTR') + elif diff > 0: + txin = self.find_txin(diff, 'HTR') node.inputs.add(txin) if 'dummy' in self._builder._nodes: diff --git a/hathor/dag_builder/tokenizer.py b/hathor/dag_builder/tokenizer.py index 041eac32b..19dbbed55 100644 --- a/hathor/dag_builder/tokenizer.py +++ b/hathor/dag_builder/tokenizer.py @@ -14,6 +14,7 @@ import re from enum import Enum, auto +from textwrap import dedent from typing import Any, Iterator """ @@ -27,14 +28,55 @@ a --> b --> c # c is a parent of b which is a parent of a a.out[i] <<< b c d # b, c, and d spend the i-th output of a a < b < c # a must be created before b and b must be created before c - a > b > c # a must be created after b and b must be creater after c - a.attr = value # set value of attribute attr to a + a > b > c # a must be created after b and b must be created after c + a.attr1 = value # set value of attribute attr to a + a.attr2 = "value" # a string literal + + a.attr3 = ``` # a multiline string literal. + if foo: # parsing is limited — there's no support for comments nor escaping characters. + bar # both start and end delimiters must be in their own line. + ``` + +Special keywords: + + b10 < dummy # `dummy` is a tx created automatically that spends genesis tokens and provides + # outputs to txs defined by the user. 
It's usually useful to set it after some + # block to pass the reward lock Special attributes: + a.out[i] = 100 HTR # set that the i-th output of a holds 100 HTR a.out[i] = 100 TOKEN # set that the i-th output of a holds 100 TOKEN where TOKEN is a custom token a.weight = 50 # set vertex weight +Nano Contracts: + + tx1.nc_id = "{'ff' * 32}" # create a Nano Contract with some custom nc_id + tx1.nc_id = tx2 # create a Nano Contract with another tx's id as its nc_id + tx1.nc_deposit = 10 HTR # perform a deposit in a Nano Contract + tx1.nc_withdrawal = 10 HTR # perform a withdraw in a Nano Contract + tx1.nc_method = initialize("00") # call a Nano Contract method + tx2.nc_method = initialize(`tx1`) # call a Nano Contract method with another tx's id as an argument + tx2.nc_seqnum = 5 + + # Points to a contract created by another contract. + tx1.nc_id = child_contract(contract_creator_id, salt.hex(), blueprint_id.hex()) + +On-chain Blueprints: + + ocb1.ocb_private_key = "{private_key}" # private key bytes in hex to sign the OCB + ocb1.ocb_password = "{password}" # password bytes in hex to sign the OCB + + ocb.ocb_code = "{ocb_code_bytes)}" # create an on-chain Blueprint with some custom code. + # the literal should be the hex value of uncompressed code bytes. + + ocb.ocb_code = ``` + class MyBlueprint(Blueprint): # multiline strings can also be used to directly inline custom code. + pass # given its limitations (describe above), for complex code it is + ``` # recommended to use separate files (see below). + + ocb.ocb_code = my_blueprint.py, MyTest # set a filename and a class name to create an OCB using code from a file. + # configure the root directory when instantiating the DagBuilder. 
Example: @@ -72,8 +114,22 @@ b5 < c0 < c10 < b20 b6 < tx3 b16 < tx4 + + # Nano Contracts and on-chain Blueprints + ocb1.ocb_private_key = "{unittest.OCB_TEST_PRIVKEY.hex()}" + ocb1.ocb_password = "{unittest.OCB_TEST_PASSWORD.hex()}" + ocb1.ocb_code = "{load_blueprint_code('bet.py', 'Bet').encode().hex()}" + + nc1.nc_id = ocb1 + nc1.nc_method = initialize("00", "00", 0) + + ocb1 <-- b300 + b300 < nc1 + """ +MULTILINE_DELIMITER = '```' + class TokenType(Enum): BLOCKCHAIN = auto() @@ -110,8 +166,29 @@ def tokenize(content: str) -> Iterator[Token]: """ blockchain_re = re.compile(r'^([a-zA-Z][a-zA-Z0-9-_]*)\[([0-9]+)..([0-9]+)\]$') first_parent: str | None + + # A `(name, key, lines)` tuple where `lines` contains the multiline string as it accumulates line by line. + multiline_accumulator: tuple[str, str, list[str]] | None = None + for line in content.split('\n'): line, _, _ = line.partition('#') + + if multiline_accumulator is not None: + if MULTILINE_DELIMITER not in line: + _name, _key, lines = multiline_accumulator + lines.append(line) + continue + + if line.strip() != MULTILINE_DELIMITER: + raise SyntaxError('invalid multiline string end') + + name, key, lines = multiline_accumulator + multiline = dedent('\n'.join(lines)) + complete_value = MULTILINE_DELIMITER + multiline + MULTILINE_DELIMITER + yield TokenType.ATTRIBUTE, (name, key, complete_value) + multiline_accumulator = None + continue + line = line.strip() if not line: continue @@ -140,7 +217,17 @@ def tokenize(content: str) -> Iterator[Token]: attrs = parts[4:] yield (TokenType.OUTPUT, (name, index, amount, token, attrs)) else: - yield (TokenType.ATTRIBUTE, (name, key, ' '.join(parts[2:]))) + value = ' '.join(parts[2:]) + + if MULTILINE_DELIMITER not in value: + yield TokenType.ATTRIBUTE, (name, key, value) + continue + + if value != MULTILINE_DELIMITER: + raise SyntaxError('invalid multiline string start') + + assert multiline_accumulator is None + multiline_accumulator = name, key, [] elif parts[1] == 
'<--': for _to, _from in collect_pairs(parts, '<--'): @@ -170,3 +257,6 @@ def tokenize(content: str) -> Iterator[Token]: else: raise SyntaxError(line) + + if multiline_accumulator is not None: + raise SyntaxError('unclosed multiline string') diff --git a/hathor/dag_builder/types.py b/hathor/dag_builder/types.py index 46d5af170..8e1c0a4a2 100644 --- a/hathor/dag_builder/types.py +++ b/hathor/dag_builder/types.py @@ -14,11 +14,13 @@ from __future__ import annotations +import ast from collections.abc import Callable from dataclasses import dataclass, field from enum import Enum from typing import Any, Iterator, NamedTuple, TypeAlias +from hathor.dag_builder.utils import get_literal from hathor.transaction import BaseTransaction from hathor.wallet import BaseWallet @@ -33,6 +35,7 @@ class DAGNodeType(Enum): Transaction = 'transaction' Token = 'token' Genesis = 'genesis' + OnChainBlueprint = 'on_chain_blueprint' @dataclass @@ -40,17 +43,52 @@ class DAGNode: name: str type: DAGNodeType - attrs: dict[str, str] = field(default_factory=dict) + attrs: dict[str, Any] = field(default_factory=dict) inputs: set[DAGInput] = field(default_factory=set) outputs: list[DAGOutput | None] = field(default_factory=list) parents: set[str] = field(default_factory=set) deps: set[str] = field(default_factory=set) + # expected balance of inputs and outputs per token + # =0 means sum(txouts) = sum(txins) + # >0 means sum(txouts) > sum(txins), e.g., withdrawal + # <0 means sum(txouts) < sum(txins), e.g., deposit + balances: dict[str, int] = field(default_factory=dict) + def get_all_dependencies(self) -> Iterator[str]: yield from self.parents yield from (name for name, _ in self.inputs) yield from self.deps + def get_attr_ast(self, attr: str) -> Any: + value = self.attrs.get(attr) + assert isinstance(value, ast.AST) + return value + + def get_attr_str(self, attr: str, *, default: str | None = None) -> str: + """Return the value of an attribute, a default, or raise a SyntaxError if it doesn't 
exist.""" + if value := self.attrs.get(attr): + assert isinstance(value, str) + return value + if default is not None: + return default + raise SyntaxError(f'missing required attribute: {self.name}.{attr}') + + def get_attr_list(self, attr: str, *, default: list[Any] | None = None) -> list[Any]: + """Return the value of an attribute, a default, or raise a SyntaxError if it doesn't exist.""" + if value := self.attrs.get(attr): + assert isinstance(value, list) + return value + if default is not None: + return default + raise SyntaxError(f'missing required attribute: {self.name}.{attr}') + + def get_required_literal(self, attr: str) -> str: + """Return the value of a required attribute as a literal or raise a SyntaxError if it doesn't exist.""" + value = self.get_attr_str(attr) + assert isinstance(value, str) + return get_literal(value) + class DAGInput(NamedTuple): node_name: str diff --git a/hathor/dag_builder/utils.py b/hathor/dag_builder/utils.py new file mode 100644 index 000000000..9432af3f0 --- /dev/null +++ b/hathor/dag_builder/utils.py @@ -0,0 +1,48 @@ +# Copyright 2024 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from hathor.dag_builder.tokenizer import MULTILINE_DELIMITER + +TEXT_DELIMITER = '"' +LITERAL_DELIMITERS = [TEXT_DELIMITER, MULTILINE_DELIMITER] + + +def is_literal(value: str) -> bool: + """Return true if the value is a literal.""" + return _get_literal_delimiter(value) is not None + + +def get_literal(value: str) -> str: + """Return the content of the literal.""" + delimiter = _get_literal_delimiter(value) + assert delimiter is not None + n = len(delimiter) + return value[n:-n] + + +def _get_literal_delimiter(value: str) -> str | None: + """Return the delimiter if value is a literal, None otherwise.""" + for delimiter in LITERAL_DELIMITERS: + if value.startswith(delimiter) and value.endswith(delimiter): + return delimiter + return None + + +def parse_amount_token(value: str) -> tuple[str, int, list[str]]: + """Parse the format "[amount] [token_symbol] [args]".""" + parts = value.split() + token = parts[1] + amount = int(parts[0]) + args = parts[2:] + return (token, amount, args) diff --git a/hathor/dag_builder/vertex_exporter.py b/hathor/dag_builder/vertex_exporter.py index d56ecf4de..5d96204ac 100644 --- a/hathor/dag_builder/vertex_exporter.py +++ b/hathor/dag_builder/vertex_exporter.py @@ -12,18 +12,33 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import Iterator +import ast +import re +from collections import defaultdict +from types import ModuleType +from typing import Iterator, cast + +from typing_extensions import assert_never from hathor.conf.settings import HathorSettings -from hathor.crypto.util import decode_address +from hathor.crypto.util import decode_address, get_address_from_public_key_bytes from hathor.daa import DifficultyAdjustmentAlgorithm -from hathor.dag_builder.builder import DAGBuilder, DAGNode +from hathor.dag_builder.builder import NC_DEPOSIT_KEY, NC_WITHDRAWAL_KEY, DAGBuilder, DAGNode from hathor.dag_builder.types import DAGNodeType, VertexResolverType, WalletFactoryType +from hathor.dag_builder.utils import get_literal, is_literal +from hathor.nanocontracts import Blueprint, OnChainBlueprint +from hathor.nanocontracts.catalog import NCBlueprintCatalog +from hathor.nanocontracts.on_chain_blueprint import Code +from hathor.nanocontracts.types import BlueprintId, ContractId, NCActionType, VertexId, blueprint_id_from_bytes +from hathor.nanocontracts.utils import derive_child_contract_id, load_builtin_blueprint_for_ocb, sign_pycoin from hathor.transaction import BaseTransaction, Block, Transaction from hathor.transaction.base_transaction import TxInput, TxOutput +from hathor.transaction.headers.nano_header import ADDRESS_LEN_BYTES from hathor.transaction.scripts.p2pkh import P2PKH from hathor.transaction.token_creation_tx import TokenCreationTransaction -from hathor.wallet import BaseWallet +from hathor.wallet import BaseWallet, HDWallet, KeyPair + +_TEMPLATE_PATTERN = re.compile(r'`(\w+)`') class VertexExporter: @@ -38,6 +53,8 @@ def __init__( genesis_wallet: BaseWallet, wallet_factory: WalletFactoryType, vertex_resolver: VertexResolverType, + nc_catalog: NCBlueprintCatalog, + blueprints_module: ModuleType | None, ) -> None: self._builder = builder self._vertices: dict[str, BaseTransaction] = {} @@ -49,14 +66,23 @@ def __init__( self._daa = daa self._wallet_factory = 
wallet_factory self._vertex_resolver = vertex_resolver + self._nc_catalog = nc_catalog + self._blueprints_module = blueprints_module self._wallets['genesis'] = genesis_wallet self._wallets['main'] = self._wallet_factory() + self._next_nc_seqnum: defaultdict[bytes, int] = defaultdict(int) + def _get_node(self, name: str) -> DAGNode: """Get node.""" return self._builder._get_node(name) + def get_wallet(self, name: str) -> BaseWallet: + if name not in self._wallets: + self._wallets[name] = self._wallet_factory() + return self._wallets[name] + def get_vertex_id(self, name: str) -> bytes: """Get the vertex id given its node name.""" return self._vertices[name].hash @@ -122,6 +148,21 @@ def _create_vertex_txout( script = self.get_next_p2pkh_script() outputs.append(TxOutput(value=amount, token_data=index, script=script)) + if token_creation: + # Create mint and melt authorities to be used by future transactions + outputs.extend([ + TxOutput( + value=TxOutput.TOKEN_MINT_MASK, + token_data=TxOutput.TOKEN_AUTHORITY_MASK | 1, + script=self.get_next_p2pkh_script(), + ), + TxOutput( + value=TxOutput.TOKEN_MELT_MASK, + token_data=TxOutput.TOKEN_AUTHORITY_MASK | 1, + script=self.get_next_p2pkh_script(), + ), + ]) + return tokens, outputs def get_next_p2pkh_script(self) -> bytes: @@ -152,7 +193,7 @@ def update_vertex_hash(self, vertex: BaseTransaction, *, fix_conflict: bool = Tr self._vertex_resolver(vertex) vertex.update_hash() - def sign_all_inputs(self, node: DAGNode, vertex: Transaction) -> None: + def sign_all_inputs(self, vertex: Transaction, *, node: DAGNode | None = None) -> None: """Sign all inputs of a vertex.""" data_to_sign = vertex.get_sighash_all() for txin in vertex.inputs: @@ -167,6 +208,8 @@ def sign_all_inputs(self, node: DAGNode, vertex: Transaction) -> None: break except KeyError: pass + else: + raise ValueError('private key not found') public_key_bytes, signature = wallet.get_input_aux_data(data_to_sign, private_key) txin.data = 
P2PKH.create_input_data(public_key_bytes, signature) @@ -185,7 +228,8 @@ def create_vertex_token(self, node: DAGNode) -> TokenCreationTransaction: vertex.token_name = node.name vertex.token_symbol = node.name vertex.timestamp = self.get_min_timestamp(node) - self.sign_all_inputs(node, vertex) + self.add_nano_header_if_needed(node, vertex) + self.sign_all_inputs(vertex, node=node) if 'weight' in node.attrs: vertex.weight = float(node.attrs['weight']) else: @@ -208,6 +252,7 @@ def create_vertex_block(self, node: DAGNode) -> Block: parents = block_parents + txs_parents blk = Block(parents=parents, outputs=outputs) + self.add_nano_header_if_needed(node, blk) blk.timestamp = self.get_min_timestamp(node) + self._settings.AVG_TIME_BETWEEN_BLOCKS blk.get_height = lambda: height # type: ignore[method-assign] blk.update_hash() # the next call fails is blk.hash is None @@ -219,16 +264,196 @@ def create_vertex_block(self, node: DAGNode) -> Block: self._block_height[blk.hash] = height return blk - def create_vertex_transaction(self, node: DAGNode) -> Transaction: + def _get_ast_value_bytes(self, ast_node: ast.AST) -> bytes: + if isinstance(ast_node, ast.Constant): + return bytes.fromhex(ast_node.value) + elif isinstance(ast_node, ast.Name): + return self.get_vertex_id(ast_node.id) + elif isinstance(ast_node, ast.Attribute): + assert isinstance(ast_node.value, ast.Name) + vertex = self._vertices[ast_node.value.id] + assert isinstance(vertex, Transaction) + if ast_node.attr == 'nc_id': + return vertex.get_nano_header().nc_id + else: + raise ValueError + else: + raise ValueError('unsupported ast node') + + def _parse_nc_id(self, ast_node: ast.AST) -> tuple[bytes, BlueprintId | None]: + if not isinstance(ast_node, ast.Call): + return self._get_ast_value_bytes(ast_node), None + + assert isinstance(ast_node.func, ast.Name) + if ast_node.func.id != 'child_contract': + raise ValueError(f'unknown function: {ast_node.func.id}') + args = [self._get_ast_value_bytes(x) for x in 
ast_node.args] + if len(args) != 3: + raise ValueError('wrong number of args') + parent_id_bytes, salt, blueprint_id_bytes = args + parent_id = ContractId(VertexId(parent_id_bytes)) + blueprint_id = BlueprintId(VertexId(blueprint_id_bytes)) + child_contract_id = derive_child_contract_id(parent_id, salt, blueprint_id) + return child_contract_id, blueprint_id + + def _get_next_nc_seqnum(self, nc_pubkey: bytes) -> int: + address = get_address_from_public_key_bytes(nc_pubkey) + cur = self._next_nc_seqnum[address] + self._next_nc_seqnum[address] = cur + 1 + return cur + + def add_nano_header_if_needed(self, node: DAGNode, vertex: BaseTransaction) -> None: + if 'nc_id' not in node.attrs: + return + + nc_id, blueprint_id = self._parse_nc_id(node.get_attr_ast('nc_id')) + nc_method_raw = node.get_attr_str('nc_method') + + if blueprint_id is None: + if nc_method_raw.startswith('initialize('): + blueprint_id = blueprint_id_from_bytes(nc_id) + else: + contract_creation_vertex = self._vertice_per_id[nc_id] + assert contract_creation_vertex.is_nano_contract() + assert isinstance(contract_creation_vertex, Transaction) + contract_creation_vertex_nano_header = contract_creation_vertex.get_nano_header() + blueprint_id = blueprint_id_from_bytes(contract_creation_vertex_nano_header.nc_id) + + blueprint_class = self._get_blueprint_class(blueprint_id) + + # allows method calls such as + # nc2.nc_method = call_another_nc(`nc1`) + def _replace_escaped_vertex_id(match: re.Match) -> str: + vertex_name = match.group(1) + if vertex_ := self._vertices.get(vertex_name): + return f'"{vertex_.hash_hex}"' + raise SyntaxError(f'unknown vertex: {vertex_name}') + + if raw_args_bytes := node.get_attr_str('nc_args_bytes', default=''): + nc_method = nc_method_raw + nc_args_bytes = bytes.fromhex(get_literal(raw_args_bytes)) + else: + from hathor.nanocontracts.api_arguments_parser import parse_nc_method_call + from hathor.nanocontracts.method import Method + nc_method_raw = 
_TEMPLATE_PATTERN.sub(_replace_escaped_vertex_id, nc_method_raw) + nc_method, nc_args = parse_nc_method_call(blueprint_class, nc_method_raw) + method = Method.from_callable(getattr(blueprint_class, nc_method)) + nc_args_bytes = method.serialize_args_bytes(nc_args) + + wallet_name = node.attrs.get('nc_address', f'node_{node.name}') + wallet = self.get_wallet(wallet_name) + assert isinstance(wallet, HDWallet) + privkey = wallet.get_key_at_index(0) + + from hathor.transaction.headers.nano_header import NanoHeaderAction + nc_actions = [] + + def append_actions(action: NCActionType, key: str) -> None: + actions = node.get_attr_list(key, default=[]) + for token_name, value in actions: + assert isinstance(token_name, str) + assert isinstance(value, int) + token_index = 0 + if token_name != 'HTR': + assert isinstance(vertex, Transaction) + token_creation_tx = self._vertices[token_name] + if token_creation_tx.hash not in vertex.tokens: + # when depositing, the token uid must be added to the tokens list + # because it's possible that there are no outputs with this token. + assert action == NCActionType.DEPOSIT + vertex.tokens.append(token_creation_tx.hash) + token_index = 1 + vertex.tokens.index(token_creation_tx.hash) + + nc_actions.append(NanoHeaderAction( + type=action, + token_index=token_index, + amount=value, + )) + + append_actions(NCActionType.DEPOSIT, NC_DEPOSIT_KEY) + append_actions(NCActionType.WITHDRAWAL, NC_WITHDRAWAL_KEY) + + from hathor.transaction.headers import NanoHeader + nano_header = NanoHeader( + # Even though we know the NanoHeader only supports Transactions, we force the typing here so we can test + # that other types of vertices such as blocks would fail verification by using an unsupported header. 
+ tx=cast(Transaction, vertex), + nc_seqnum=0, + nc_id=nc_id, + nc_method=nc_method, + nc_args_bytes=nc_args_bytes, + nc_actions=nc_actions, + nc_address=b'\x00' * ADDRESS_LEN_BYTES, + nc_script=b'', + ) + vertex.headers.append(nano_header) + + if isinstance(vertex, Transaction): + sign_pycoin(nano_header, privkey) + + if 'nc_seqnum' in node.attrs: + nano_header.nc_seqnum = int(node.attrs['nc_seqnum']) + else: + nano_header.nc_seqnum = self._get_next_nc_seqnum(nano_header.nc_address) + + def create_vertex_on_chain_blueprint(self, node: DAGNode) -> OnChainBlueprint: + """Create an OnChainBlueprint given a node.""" + block_parents, txs_parents = self._create_vertex_parents(node) + inputs = self._create_vertex_txin(node) + tokens, outputs = self._create_vertex_txout(node) + + assert len(block_parents) == 0 + ocb = OnChainBlueprint(parents=txs_parents, inputs=inputs, outputs=outputs, tokens=tokens) + self.add_nano_header_if_needed(node, ocb) + code_attr = node.get_attr_str('ocb_code') + + if is_literal(code_attr): + code_literal = get_literal(code_attr) + try: + code_bytes = bytes.fromhex(code_literal) + except ValueError: + code_str = code_literal + else: + code_str = code_bytes.decode() + else: + assert self._blueprints_module is not None + filename, _, class_name = code_attr.partition(',') + filename, class_name = filename.strip(), class_name.strip() + if not filename or not class_name: + raise SyntaxError(f'missing blueprint filename or class name: {code_attr}') + code_str = load_builtin_blueprint_for_ocb(filename, class_name, self._blueprints_module) + + ocb.code = Code.from_python_code(code_str, self._settings) + ocb.timestamp = self.get_min_timestamp(node) + self.sign_all_inputs(ocb, node=node) + + private_key_literal = node.get_required_literal('ocb_private_key') + private_key_bytes = bytes.fromhex(private_key_literal) + password_literal = node.get_required_literal('ocb_password') + password_bytes = bytes.fromhex(password_literal) + key = 
KeyPair(private_key_bytes) + private_key = key.get_private_key(password_bytes) + ocb.sign(private_key) + + if 'weight' in node.attrs: + ocb.weight = float(node.attrs['weight']) + else: + ocb.weight = self._daa.minimum_tx_weight(ocb) + + self.update_vertex_hash(ocb) + return ocb + + def create_vertex_transaction(self, node: DAGNode, *, cls: type[Transaction] = Transaction) -> Transaction: """Create a Transaction given a node.""" block_parents, txs_parents = self._create_vertex_parents(node) inputs = self._create_vertex_txin(node) tokens, outputs = self._create_vertex_txout(node) assert len(block_parents) == 0 - tx = Transaction(parents=txs_parents, inputs=inputs, outputs=outputs, tokens=tokens) + tx = cls(parents=txs_parents, inputs=inputs, outputs=outputs, tokens=tokens) tx.timestamp = self.get_min_timestamp(node) - self.sign_all_inputs(node, tx) + self.add_nano_header_if_needed(node, tx) + self.sign_all_inputs(tx, node=node) if 'weight' in node.attrs: tx.weight = float(node.attrs['weight']) else: @@ -283,14 +508,21 @@ def create_vertex(self, node: DAGNode) -> BaseTransaction: case DAGNodeType.Genesis: vertex = self.create_genesis_vertex(node) + case DAGNodeType.OnChainBlueprint: + vertex = self.create_vertex_on_chain_blueprint(node) + + case DAGNodeType.Unknown: + raise AssertionError('dag type should be known at this point') + case _: - raise NotImplementedError(node.type) + assert_never(node.type) assert vertex is not None assert vertex.hash not in self._vertice_per_id assert node.name not in self._vertices self._vertice_per_id[vertex.hash] = vertex self._vertices[node.name] = vertex + vertex.name = node.name return vertex def export(self) -> Iterator[tuple[DAGNode, BaseTransaction]]: @@ -303,3 +535,12 @@ def export(self) -> Iterator[tuple[DAGNode, BaseTransaction]]: vertex = self.create_vertex(node) if node.type is not DAGNodeType.Genesis: yield node, vertex + + def _get_blueprint_class(self, blueprint_id: BlueprintId) -> type[Blueprint]: + """Get a blueprint 
class from the catalog or from our own on-chain blueprints.""" + if blueprint_class := self._nc_catalog.get_blueprint_class(blueprint_id): + return blueprint_class + ocb = self._vertice_per_id.get(blueprint_id) + if ocb is None or not isinstance(ocb, OnChainBlueprint): + raise SyntaxError(f'{blueprint_id.hex()} is not a valid blueprint id') + return ocb.get_blueprint_class() diff --git a/hathor/event/event_manager.py b/hathor/event/event_manager.py index 6ce402b82..8eb828b28 100644 --- a/hathor/event/event_manager.py +++ b/hathor/event/event_manager.py @@ -47,6 +47,7 @@ HathorEvents.REORG_FINISHED, HathorEvents.CONSENSUS_TX_UPDATE, HathorEvents.CONSENSUS_TX_REMOVED, + HathorEvents.NC_EVENT, ] diff --git a/hathor/event/model/event_data.py b/hathor/event/model/event_data.py index 1903ef74e..ad2921309 100644 --- a/hathor/event/model/event_data.py +++ b/hathor/event/model/event_data.py @@ -12,12 +12,17 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Optional, TypeAlias, Union, cast +from __future__ import annotations + +from typing import Any, Optional, TypeAlias, Union, cast from pydantic import Extra, validator from typing_extensions import Self +from hathor.crypto.util import get_address_b58_from_bytes from hathor.pubsub import EventArguments +from hathor.transaction import Transaction +from hathor.transaction.headers import VertexHeaderId from hathor.utils.pydantic import BaseModel @@ -31,7 +36,8 @@ class TxOutput(BaseModel, extra=Extra.ignore): value: int token_data: int script: str - decoded: Optional[DecodedTxOutput] + # Instead of None, an empty dict represents an unknown script, as requested by our wallet-service use case. 
+ decoded: DecodedTxOutput | dict[Any, Any] class TxInput(BaseModel): @@ -40,6 +46,18 @@ class TxInput(BaseModel): spent_output: TxOutput +class NanoHeader(BaseModel): + id: str + nc_seqnum: int + nc_id: str + nc_method: str + nc_address: str + + +# Union type to model all header types, currently only nano header exists +TxHeader: TypeAlias = NanoHeader + + class SpentOutput(BaseModel): index: int tx_ids: list[str] @@ -60,6 +78,7 @@ class TxMetadata(BaseModel, extra=Extra.ignore): first_block: Optional[str] height: int validation: str + nc_execution: str | None @validator('spent_outputs', pre=True, each_item=True) def _parse_spent_outputs(cls, spent_output: Union[SpentOutput, list[Union[int, list[str]]]]) -> SpentOutput: @@ -115,6 +134,7 @@ class TxDataWithoutMeta(BaseEventData, extra=Extra.ignore): token_name: Optional[str] token_symbol: Optional[str] aux_pow: Optional[str] = None + headers: list[TxHeader] = [] @classmethod def from_event_arguments(cls, args: EventArguments) -> Self: @@ -123,17 +143,9 @@ def from_event_arguments(cls, args: EventArguments) -> Self: tx_json = tx_extra_data_json['tx'] meta_json = tx_extra_data_json['meta'] tx_json['metadata'] = meta_json - tx_json['outputs'] = [ - output | dict(decoded=output['decoded'] or None) - for output in tx_json['outputs'] - ] inputs = [] for tx_input in tx_json['inputs']: - decoded = tx_input.get('decoded') - if decoded and decoded.get('address') is None: - # we remove the decoded data if it does not contain an address - tx_input['decoded'] = None inputs.append( dict( tx_id=tx_input['tx_id'], @@ -143,6 +155,22 @@ def from_event_arguments(cls, args: EventArguments) -> Self: ) tx_json['inputs'] = inputs + + headers = [] + if args.tx.is_nano_contract(): + assert isinstance(args.tx, Transaction) + nano_header = args.tx.get_nano_header() + headers.append( + dict( + id=VertexHeaderId.NANO_HEADER.value.hex(), + nc_seqnum=nano_header.nc_seqnum, + nc_id=nano_header.nc_id.hex(), + nc_method=nano_header.nc_method, + 
nc_address=get_address_b58_from_bytes(nano_header.nc_address), + ) + ) + + tx_json['headers'] = headers return cls(**tx_json) @@ -167,5 +195,38 @@ def from_event_arguments(cls, args: EventArguments) -> 'ReorgData': ) +class NCEventData(BaseEventData): + """Class that represents data for a custom nano contract event.""" + + # The ID of the transaction that executed a nano contract. + vertex_id: str + + # The ID of the nano contract that was executed. + nc_id: str + + # The nano contract execution state. + nc_execution: str + + # The block that confirmed this transaction, executing the nano contract. + first_block: str + + # Custom data provided by the blueprint. + data_hex: str + + @classmethod + def from_event_arguments(cls, args: EventArguments) -> NCEventData: + meta = args.tx.get_metadata() + assert meta.nc_execution is not None + assert meta.first_block is not None + + return cls( + vertex_id=args.tx.hash_hex, + nc_id=args.nc_event.nc_id.hex(), + nc_execution=meta.nc_execution, + first_block=meta.first_block.hex(), + data_hex=args.nc_event.data.hex(), + ) + + # Union type to encompass BaseEventData polymorphism -EventData: TypeAlias = EmptyData | TxData | TxDataWithoutMeta | ReorgData +EventData: TypeAlias = EmptyData | TxData | TxDataWithoutMeta | ReorgData | NCEventData diff --git a/hathor/event/model/event_type.py b/hathor/event/model/event_type.py index 38e968427..bba786664 100644 --- a/hathor/event/model/event_type.py +++ b/hathor/event/model/event_type.py @@ -14,7 +14,7 @@ from enum import Enum -from hathor.event.model.event_data import BaseEventData, EmptyData, ReorgData, TxData, TxDataWithoutMeta +from hathor.event.model.event_data import BaseEventData, EmptyData, NCEventData, ReorgData, TxData, TxDataWithoutMeta from hathor.pubsub import HathorEvents @@ -27,6 +27,7 @@ class EventType(Enum): VERTEX_METADATA_CHANGED = 'VERTEX_METADATA_CHANGED' VERTEX_REMOVED = 'VERTEX_REMOVED' FULL_NODE_CRASHED = 'FULL_NODE_CRASHED' + NC_EVENT = 'NC_EVENT' @classmethod 
def from_hathor_event(cls, hathor_event: HathorEvents) -> 'EventType': @@ -46,7 +47,8 @@ def data_type(self) -> type[BaseEventData]: HathorEvents.REORG_STARTED: EventType.REORG_STARTED, HathorEvents.REORG_FINISHED: EventType.REORG_FINISHED, HathorEvents.CONSENSUS_TX_UPDATE: EventType.VERTEX_METADATA_CHANGED, - HathorEvents.CONSENSUS_TX_REMOVED: EventType.VERTEX_REMOVED + HathorEvents.CONSENSUS_TX_REMOVED: EventType.VERTEX_REMOVED, + HathorEvents.NC_EVENT: EventType.NC_EVENT } _EVENT_TYPE_TO_EVENT_DATA: dict[EventType, type[BaseEventData]] = { @@ -58,4 +60,5 @@ def data_type(self) -> type[BaseEventData]: EventType.VERTEX_METADATA_CHANGED: TxData, EventType.VERTEX_REMOVED: TxDataWithoutMeta, EventType.FULL_NODE_CRASHED: EmptyData, + EventType.NC_EVENT: NCEventData, } diff --git a/hathor/event/storage/__init__.py b/hathor/event/storage/__init__.py index 57017aa4c..aaaeb30d7 100644 --- a/hathor/event/storage/__init__.py +++ b/hathor/event/storage/__init__.py @@ -13,7 +13,6 @@ # limitations under the License. from hathor.event.storage.event_storage import EventStorage -from hathor.event.storage.memory_storage import EventMemoryStorage from hathor.event.storage.rocksdb_storage import EventRocksDBStorage -__all__ = ['EventStorage', 'EventMemoryStorage', 'EventRocksDBStorage'] +__all__ = ['EventStorage', 'EventRocksDBStorage'] diff --git a/hathor/event/storage/memory_storage.py b/hathor/event/storage/memory_storage.py deleted file mode 100644 index 6de5c6df5..000000000 --- a/hathor/event/storage/memory_storage.py +++ /dev/null @@ -1,93 +0,0 @@ -# Copyright 2022 Hathor Labs -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from typing import Iterable, Iterator, Optional - -from hathor.event.model.base_event import BaseEvent -from hathor.event.model.node_state import NodeState -from hathor.event.storage.event_storage import EventStorage - - -class EventMemoryStorage(EventStorage): - def __init__(self) -> None: - self._events: list[BaseEvent] = [] - self._last_event: Optional[BaseEvent] = None - self._last_group_id: Optional[int] = None - self._stream_id: Optional[str] = None - self._node_state: Optional[NodeState] = None - self._event_queue_enabled: bool = False - - def save_event(self, event: BaseEvent) -> None: - if event.id != len(self._events): - raise ValueError('invalid event.id, ids must be sequential and leave no gaps') - self._last_event = event - if event.group_id is not None: - self._last_group_id = event.group_id - self._events.append(event) - - def save_events(self, events: Iterable[BaseEvent]) -> None: - for event in events: - self.save_event(event) - - def get_event(self, key: int) -> Optional[BaseEvent]: - if key < 0: - raise ValueError(f'event.id \'{key}\' must be non-negative') - if key >= len(self._events): - return None - event = self._events[key] - assert event.id == key - return event - - def get_last_event(self) -> Optional[BaseEvent]: - return self._last_event - - def get_last_group_id(self) -> Optional[int]: - return self._last_group_id - - def iter_from_event(self, key: int) -> Iterator[BaseEvent]: - if key < 0: - raise ValueError(f'event.id \'{key}\' must be non-negative') - - while key < len(self._events): - yield self._events[key] - key += 1 - - 
def reset_events(self) -> None: - self._events = [] - self._last_event = None - self._last_group_id = None - self._stream_id = None - - def reset_all(self) -> None: - self.reset_events() - self._node_state = None - self._event_queue_enabled = False - - def save_node_state(self, state: NodeState) -> None: - self._node_state = state - - def get_node_state(self) -> Optional[NodeState]: - return self._node_state - - def save_event_queue_state(self, enabled: bool) -> None: - self._event_queue_enabled = enabled - - def get_event_queue_state(self) -> bool: - return self._event_queue_enabled - - def save_stream_id(self, stream_id: str) -> None: - self._stream_id = stream_id - - def get_stream_id(self) -> Optional[str]: - return self._stream_id diff --git a/hathor/feature_activation/feature.py b/hathor/feature_activation/feature.py index 58a51a3f5..4e5671093 100644 --- a/hathor/feature_activation/feature.py +++ b/hathor/feature_activation/feature.py @@ -29,3 +29,6 @@ class Feature(str, Enum): NOP_FEATURE_3 = 'NOP_FEATURE_3' INCREASE_MAX_MERKLE_PATH_LENGTH = 'INCREASE_MAX_MERKLE_PATH_LENGTH' + COUNT_CHECKDATASIG_OP = 'COUNT_CHECKDATASIG_OP' + + NANO_CONTRACTS = 'NANO_CONTRACTS' diff --git a/hathor/indexes/__init__.py b/hathor/indexes/__init__.py index 7bbabca88..d11fcbfec 100644 --- a/hathor/indexes/__init__.py +++ b/hathor/indexes/__init__.py @@ -13,12 +13,11 @@ # limitations under the License. 
from hathor.indexes.address_index import AddressIndex -from hathor.indexes.manager import IndexesManager, MemoryIndexesManager, RocksDBIndexesManager +from hathor.indexes.manager import IndexesManager, RocksDBIndexesManager from hathor.indexes.timestamp_index import TimestampIndex __all__ = [ 'IndexesManager', - 'MemoryIndexesManager', 'RocksDBIndexesManager', 'AddressIndex', 'TimestampIndex', diff --git a/hathor/indexes/base_index.py b/hathor/indexes/base_index.py index 98b1c0721..5d1cb87b2 100644 --- a/hathor/indexes/base_index.py +++ b/hathor/indexes/base_index.py @@ -19,7 +19,6 @@ from structlog import get_logger -from hathor.conf.get_settings import get_global_settings from hathor.indexes.scope import Scope from hathor.transaction.base_transaction import BaseTransaction @@ -36,8 +35,8 @@ class BaseIndex(ABC): This class exists so we can interact with indexes without knowing anything specific to its implemented. It was created to generalize how we initialize indexes and keep track of which ones are up-to-date. """ - def __init__(self, *, settings: HathorSettings | None = None) -> None: - self._settings = settings or get_global_settings() + def __init__(self, *, settings: HathorSettings) -> None: + self._settings = settings self.log = logger.new() def init_start(self, indexes_manager: 'IndexesManager') -> None: @@ -57,7 +56,7 @@ def get_db_name(self) -> Optional[str]: """ The returned string is used to generate the relevant attributes for storing an indexe's state in the db. If None is returned, the database will not store the index initialization state and they will always be - initialized. This is the expected mode that memory-only indexes will use. + initialized. 
""" raise NotImplementedError diff --git a/hathor/indexes/blueprint_history_index.py b/hathor/indexes/blueprint_history_index.py new file mode 100644 index 000000000..961b0555c --- /dev/null +++ b/hathor/indexes/blueprint_history_index.py @@ -0,0 +1,79 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +from abc import abstractmethod +from typing import Iterator + +from hathor.indexes.scope import Scope +from hathor.indexes.tx_group_index import TxGroupIndex +from hathor.transaction import BaseTransaction, Transaction + +SCOPE = Scope( + include_blocks=False, + include_txs=True, + include_voided=True, +) + + +class BlueprintHistoryIndex(TxGroupIndex[bytes]): + """Index of all Nano Contracts of a Blueprint.""" + + def get_scope(self) -> Scope: + return SCOPE + + def init_loop_step(self, tx: BaseTransaction) -> None: + self.add_tx(tx) + + @abstractmethod + def add_tx(self, tx: BaseTransaction) -> None: + """Add tx to this index. + """ + raise NotImplementedError + + @abstractmethod + def remove_tx(self, tx: BaseTransaction) -> None: + """Remove tx from this index. 
+ """ + raise NotImplementedError + + def _extract_keys(self, tx: BaseTransaction) -> Iterator[bytes]: + if not tx.is_nano_contract(): + return + assert isinstance(tx, Transaction) + nano_header = tx.get_nano_header() + if not nano_header.is_creating_a_new_contract(): + return + yield nano_header.nc_id + + def get_newest(self, blueprint_id: bytes) -> Iterator[bytes]: + """Get a list of nano_contract_ids sorted by timestamp for a given blueprint_id starting from the newest.""" + return self._get_sorted_from_key(blueprint_id, reverse=True) + + def get_oldest(self, blueprint_id: bytes) -> Iterator[bytes]: + """Get a list of nano_contract_ids sorted by timestamp for a given blueprint_id starting from the oldest.""" + return self._get_sorted_from_key(blueprint_id) + + def get_older(self, blueprint_id: bytes, tx_start: BaseTransaction) -> Iterator[bytes]: + """ + Get a list of nano_contract_ids sorted by timestamp for a given blueprint_id that are older than tx_start. + """ + return self._get_sorted_from_key(blueprint_id, tx_start=tx_start, reverse=True) + + def get_newer(self, blueprint_id: bytes, tx_start: BaseTransaction) -> Iterator[bytes]: + """ + Get a list of nano_contract_ids sorted by timestamp for a given blueprint_id that are newer than tx_start. + """ + return self._get_sorted_from_key(blueprint_id, tx_start=tx_start) diff --git a/hathor/indexes/blueprint_timestamp_index.py b/hathor/indexes/blueprint_timestamp_index.py new file mode 100644 index 000000000..1cc8a4291 --- /dev/null +++ b/hathor/indexes/blueprint_timestamp_index.py @@ -0,0 +1,39 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import final + +from hathor.indexes.rocksdb_vertex_timestamp_index import RocksDBVertexTimestampIndex +from hathor.indexes.scope import Scope +from hathor.transaction import BaseTransaction + +SCOPE = Scope( + include_blocks=False, + include_txs=True, + include_voided=True, +) + + +class BlueprintTimestampIndex(RocksDBVertexTimestampIndex): + """Index of on-chain Blueprints sorted by their timestamps.""" + cf_name = b'blueprint-index' + db_name = 'on-chain-blueprints' + + def get_scope(self) -> Scope: + return SCOPE + + @final + def _should_add(self, tx: BaseTransaction) -> bool: + from hathor.nanocontracts import OnChainBlueprint + return isinstance(tx, OnChainBlueprint) diff --git a/hathor/indexes/manager.py b/hathor/indexes/manager.py index 351c0e2ab..3a51e8fd6 100644 --- a/hathor/indexes/manager.py +++ b/hathor/indexes/manager.py @@ -12,27 +12,35 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from __future__ import annotations + import operator from abc import ABC, abstractmethod from functools import reduce from typing import TYPE_CHECKING, Iterator, Optional from structlog import get_logger +from typing_extensions import assert_never -from hathor.conf.settings import HathorSettings from hathor.indexes.address_index import AddressIndex from hathor.indexes.base_index import BaseIndex +from hathor.indexes.blueprint_history_index import BlueprintHistoryIndex +from hathor.indexes.blueprint_timestamp_index import BlueprintTimestampIndex from hathor.indexes.height_index import HeightIndex from hathor.indexes.info_index import InfoIndex from hathor.indexes.mempool_tips_index import MempoolTipsIndex +from hathor.indexes.nc_creation_index import NCCreationIndex +from hathor.indexes.nc_history_index import NCHistoryIndex from hathor.indexes.timestamp_index import ScopeType as TimestampScopeType, TimestampIndex from hathor.indexes.tips_index import ScopeType as TipsScopeType, TipsIndex from hathor.indexes.tokens_index import TokensIndex from hathor.indexes.utxo_index import UtxoIndex from hathor.transaction import BaseTransaction +from hathor.transaction.nc_execution_state import NCExecutionState from hathor.util import tx_progress if TYPE_CHECKING: # pragma: no cover + from hathor.conf.settings import HathorSettings from hathor.pubsub import PubSubManager from hathor.storage import RocksDBStorage from hathor.transaction.storage import TransactionStorage @@ -65,6 +73,10 @@ class IndexesManager(ABC): addresses: Optional[AddressIndex] tokens: Optional[TokensIndex] utxo: Optional[UtxoIndex] + nc_creation: Optional[NCCreationIndex] + nc_history: Optional[NCHistoryIndex] + blueprints: Optional[BlueprintTimestampIndex] + blueprint_history: Optional[BlueprintHistoryIndex] def __init_checks__(self): """ Implementations must call this at the **end** of their __init__ for running ValueError checks.""" @@ -93,6 +105,10 @@ def iter_all_indexes(self) -> Iterator[BaseIndex]: 
self.addresses, self.tokens, self.utxo, + self.nc_creation, + self.nc_history, + self.blueprints, + self.blueprint_history, ]) @abstractmethod @@ -115,6 +131,11 @@ def enable_mempool_index(self) -> None: """Enable mempool index. It does nothing if it has already been enabled.""" raise NotImplementedError + @abstractmethod + def enable_nc_indexes(self) -> None: + """Enable Nano Contract related indexes.""" + raise NotImplementedError + def force_clear_all(self) -> None: """ Force clear all indexes. """ @@ -190,6 +211,150 @@ def update(self, tx: BaseTransaction) -> None: if self.utxo: self.utxo.update(tx) + def handle_contract_execution(self, tx: BaseTransaction) -> None: + """ + Update indexes according to a Nano Contract execution. + Must be called only once for each time a contract is executed. + """ + from hathor.conf.settings import HATHOR_TOKEN_UID + from hathor.nanocontracts.runner.types import ( + NCIndexUpdateRecord, + SyscallCreateContractRecord, + SyscallUpdateTokensRecord, + UpdateAuthoritiesRecord, + ) + from hathor.nanocontracts.types import ContractId + from hathor.transaction.nc_execution_state import NCExecutionState + + meta = tx.get_metadata() + assert tx.is_nano_contract() + assert meta.nc_execution is NCExecutionState.SUCCESS + assert meta.nc_calls + first_call = meta.nc_calls[0] + index_records: list[NCIndexUpdateRecord] = [] + + # Add to indexes. + for call in meta.nc_calls: + # Txs that call other contracts are added to those contracts' history. This includes calls to `initialize`. + if self.nc_history: + self.nc_history.add_single_key(call.contract_id, tx) + + # Accumulate all index update records. 
+ index_records.extend(call.index_updates) + + created_contracts: set[ContractId] = set() + for record in index_records: + match record: + case SyscallCreateContractRecord(blueprint_id=blueprint_id, contract_id=contract_id): + assert contract_id not in created_contracts, f'contract {contract_id.hex()} created multiple times' + assert contract_id != first_call.contract_id, ( + f'contract {contract_id.hex()} cannot make a syscall to create itself' + ) + created_contracts.add(contract_id) + + # Txs that create other contracts are added to the NC creation index and blueprint index. + # They're already added to the NC history index, above. + if self.nc_creation: + self.nc_creation.manually_add_tx(tx) + + if self.blueprint_history: + self.blueprint_history.add_single_key(blueprint_id, tx) + + case SyscallUpdateTokensRecord(): + # Minted/melted tokens are added/removed to/from the tokens index, + # and the respective destroyed/created HTR too. + if self.tokens: + try: + self.tokens.get_token_info(record.token_uid) + except KeyError: + # If the token doesn't exist in the index yet, it must be a token creation syscall. + from hathor.nanocontracts.runner.types import IndexUpdateRecordType + assert record.type is IndexUpdateRecordType.CREATE_TOKEN, record.type + assert record.token_name is not None and record.token_symbol is not None + self.tokens.create_token_info_from_contract( + token_uid=record.token_uid, + name=record.token_name, + symbol=record.token_symbol, + ) + + self.tokens.add_to_total(record.token_uid, record.token_amount) + self.tokens.add_to_total(HATHOR_TOKEN_UID, record.htr_amount) + + case UpdateAuthoritiesRecord(): + if self.tokens: + self.tokens.update_authorities_from_contract(record) + + case _: + assert_never(record) + + def handle_contract_unexecution(self, tx: BaseTransaction) -> None: + """ + Update indexes according to a Nano Contract unexecution, which happens when a reorg unconfirms a nano tx. 
+ Must be called only once for each time a contract is unexecuted. + """ + from hathor.conf.settings import HATHOR_TOKEN_UID + from hathor.nanocontracts.runner.types import ( + NCIndexUpdateRecord, + SyscallCreateContractRecord, + SyscallUpdateTokensRecord, + UpdateAuthoritiesRecord, + ) + from hathor.nanocontracts.types import NC_INITIALIZE_METHOD, ContractId + + meta = tx.get_metadata() + assert tx.is_nano_contract() + assert meta.nc_execution is NCExecutionState.SUCCESS + assert meta.nc_calls + first_call = meta.nc_calls[0] + records: list[NCIndexUpdateRecord] = [] + + # Remove from indexes, but we must keep the first call's contract still in the indexes. + for call in meta.nc_calls: + # Remove from nc_history except where it's the same contract as the first call. + if self.nc_history and call.contract_id != first_call.contract_id: + self.nc_history.remove_single_key(call.contract_id, tx) + + # Accumulate all syscalls. + records.extend(call.index_updates) + + created_contracts: set[ContractId] = set() + for record in records: + match record: + case SyscallCreateContractRecord(blueprint_id=blueprint_id, contract_id=contract_id): + assert contract_id not in created_contracts, f'contract {contract_id.hex()} created multiple times' + assert contract_id != first_call.contract_id, ( + f'contract {contract_id.hex()} cannot make a syscall to create itself' + ) + created_contracts.add(contract_id) + + # Remove only when the first call is not creating a contract, that is, + # if the tx itself is a nc creation, it must be kept in the indexes. + if first_call.method_name != NC_INITIALIZE_METHOD: + # Remove from nc_creation. + if self.nc_creation: + self.nc_creation.del_tx(tx) + + # Remove from blueprint_history. + if self.blueprint_history: + self.blueprint_history.remove_single_key(blueprint_id, tx) + + case SyscallUpdateTokensRecord(): + # Undo the tokens update. 
+ if self.tokens: + self.tokens.add_to_total(record.token_uid, -record.token_amount) + self.tokens.add_to_total(HATHOR_TOKEN_UID, -record.htr_amount) + + from hathor.nanocontracts.runner.types import IndexUpdateRecordType + if record.type is IndexUpdateRecordType.CREATE_TOKEN: + self.tokens.destroy_token(record.token_uid) + + case UpdateAuthoritiesRecord(): + if self.tokens: + self.tokens.update_authorities_from_contract(record, undo=True) + + case _: + assert_never(record) + def add_tx(self, tx: BaseTransaction) -> bool: """ Add a transaction to the indexes @@ -217,6 +382,18 @@ def add_tx(self, tx: BaseTransaction) -> bool: if self.tokens: self.tokens.add_tx(tx) + if self.nc_creation: + self.nc_creation.add_tx(tx) + + if self.nc_history: + self.nc_history.add_tx(tx) + + if self.blueprints: + self.blueprints.add_tx(tx) + + if self.blueprint_history: + self.blueprint_history.add_tx(tx) + # We need to check r1 as well to make sure we don't count twice the transactions/blocks that are # just changing from voided to executed or vice-versa if r1 and r3: @@ -241,6 +418,14 @@ def del_tx(self, tx: BaseTransaction, *, remove_all: bool = False, relax_assert: self.addresses.remove_tx(tx) if self.utxo: self.utxo.del_tx(tx) + if self.nc_creation: + self.nc_creation.del_tx(tx) + if self.nc_history: + self.nc_history.remove_tx(tx) + if self.blueprints: + self.blueprints.del_tx(tx) + if self.blueprint_history: + self.blueprint_history.remove_tx(tx) self.info.update_counts(tx, remove=True) # mempool will pick-up if the transaction is voided/invalid and remove it @@ -259,75 +444,34 @@ def del_tx(self, tx: BaseTransaction, *, remove_all: bool = False, relax_assert: self.tokens.del_tx(tx, remove_all=remove_all) -class MemoryIndexesManager(IndexesManager): - def __init__(self, *, settings: HathorSettings | None = None) -> None: - from hathor.indexes.memory_height_index import MemoryHeightIndex - from hathor.indexes.memory_info_index import MemoryInfoIndex - from 
hathor.indexes.memory_timestamp_index import MemoryTimestampIndex - from hathor.indexes.memory_tips_index import MemoryTipsIndex - - self.info = MemoryInfoIndex() - self.all_tips = MemoryTipsIndex(scope_type=TipsScopeType.ALL) - self.block_tips = MemoryTipsIndex(scope_type=TipsScopeType.BLOCKS) - self.tx_tips = MemoryTipsIndex(scope_type=TipsScopeType.TXS) - - self.sorted_all = MemoryTimestampIndex(scope_type=TimestampScopeType.ALL) - self.sorted_blocks = MemoryTimestampIndex(scope_type=TimestampScopeType.BLOCKS) - self.sorted_txs = MemoryTimestampIndex(scope_type=TimestampScopeType.TXS) - - self.addresses = None - self.tokens = None - self.utxo = None - self.height = MemoryHeightIndex(settings=settings) - self.mempool_tips = None - - # XXX: this has to be at the end of __init__, after everything has been initialized - self.__init_checks__() - - def enable_address_index(self, pubsub: 'PubSubManager') -> None: - from hathor.indexes.memory_address_index import MemoryAddressIndex - if self.addresses is None: - self.addresses = MemoryAddressIndex(pubsub) - - def enable_tokens_index(self) -> None: - from hathor.indexes.memory_tokens_index import MemoryTokensIndex - if self.tokens is None: - self.tokens = MemoryTokensIndex() - - def enable_utxo_index(self) -> None: - from hathor.indexes.memory_utxo_index import MemoryUtxoIndex - if self.utxo is None: - self.utxo = MemoryUtxoIndex() - - def enable_mempool_index(self) -> None: - from hathor.indexes.memory_mempool_tips_index import MemoryMempoolTipsIndex - if self.mempool_tips is None: - self.mempool_tips = MemoryMempoolTipsIndex() - - class RocksDBIndexesManager(IndexesManager): - def __init__(self, rocksdb_storage: 'RocksDBStorage') -> None: + def __init__(self, rocksdb_storage: 'RocksDBStorage', *, settings: HathorSettings) -> None: from hathor.indexes.partial_rocksdb_tips_index import PartialRocksDBTipsIndex from hathor.indexes.rocksdb_height_index import RocksDBHeightIndex from hathor.indexes.rocksdb_info_index import 
RocksDBInfoIndex from hathor.indexes.rocksdb_timestamp_index import RocksDBTimestampIndex + self.settings = settings self._db = rocksdb_storage.get_db() - self.info = RocksDBInfoIndex(self._db) - self.height = RocksDBHeightIndex(self._db) - self.all_tips = PartialRocksDBTipsIndex(self._db, scope_type=TipsScopeType.ALL) - self.block_tips = PartialRocksDBTipsIndex(self._db, scope_type=TipsScopeType.BLOCKS) - self.tx_tips = PartialRocksDBTipsIndex(self._db, scope_type=TipsScopeType.TXS) + self.info = RocksDBInfoIndex(self._db, settings=settings) + self.height = RocksDBHeightIndex(self._db, settings=settings) + self.all_tips = PartialRocksDBTipsIndex(self._db, scope_type=TipsScopeType.ALL, settings=settings) + self.block_tips = PartialRocksDBTipsIndex(self._db, scope_type=TipsScopeType.BLOCKS, settings=settings) + self.tx_tips = PartialRocksDBTipsIndex(self._db, scope_type=TipsScopeType.TXS, settings=settings) - self.sorted_all = RocksDBTimestampIndex(self._db, scope_type=TimestampScopeType.ALL) - self.sorted_blocks = RocksDBTimestampIndex(self._db, scope_type=TimestampScopeType.BLOCKS) - self.sorted_txs = RocksDBTimestampIndex(self._db, scope_type=TimestampScopeType.TXS) + self.sorted_all = RocksDBTimestampIndex(self._db, scope_type=TimestampScopeType.ALL, settings=settings) + self.sorted_blocks = RocksDBTimestampIndex(self._db, scope_type=TimestampScopeType.BLOCKS, settings=settings) + self.sorted_txs = RocksDBTimestampIndex(self._db, scope_type=TimestampScopeType.TXS, settings=settings) self.addresses = None self.tokens = None self.utxo = None self.mempool_tips = None + self.nc_creation = None + self.nc_history = None + self.blueprints = None + self.blueprint_history = None # XXX: this has to be at the end of __init__, after everything has been initialized self.__init_checks__() @@ -335,20 +479,33 @@ def __init__(self, rocksdb_storage: 'RocksDBStorage') -> None: def enable_address_index(self, pubsub: 'PubSubManager') -> None: from 
hathor.indexes.rocksdb_address_index import RocksDBAddressIndex if self.addresses is None: - self.addresses = RocksDBAddressIndex(self._db, pubsub=pubsub) + self.addresses = RocksDBAddressIndex(self._db, pubsub=pubsub, settings=self.settings) def enable_tokens_index(self) -> None: from hathor.indexes.rocksdb_tokens_index import RocksDBTokensIndex if self.tokens is None: - self.tokens = RocksDBTokensIndex(self._db) + self.tokens = RocksDBTokensIndex(self._db, settings=self.settings) def enable_utxo_index(self) -> None: from hathor.indexes.rocksdb_utxo_index import RocksDBUtxoIndex if self.utxo is None: - self.utxo = RocksDBUtxoIndex(self._db) + self.utxo = RocksDBUtxoIndex(self._db, settings=self.settings) def enable_mempool_index(self) -> None: from hathor.indexes.memory_mempool_tips_index import MemoryMempoolTipsIndex if self.mempool_tips is None: # XXX: use of RocksDBMempoolTipsIndex is very slow and was suspended - self.mempool_tips = MemoryMempoolTipsIndex() + self.mempool_tips = MemoryMempoolTipsIndex(settings=self.settings) + + def enable_nc_indexes(self) -> None: + from hathor.indexes.blueprint_timestamp_index import BlueprintTimestampIndex + from hathor.indexes.rocksdb_blueprint_history_index import RocksDBBlueprintHistoryIndex + from hathor.indexes.rocksdb_nc_history_index import RocksDBNCHistoryIndex + if self.nc_creation is None: + self.nc_creation = NCCreationIndex(self._db) + if self.nc_history is None: + self.nc_history = RocksDBNCHistoryIndex(self._db) + if self.blueprints is None: + self.blueprints = BlueprintTimestampIndex(self._db) + if self.blueprint_history is None: + self.blueprint_history = RocksDBBlueprintHistoryIndex(self._db) diff --git a/hathor/indexes/memory_address_index.py b/hathor/indexes/memory_address_index.py deleted file mode 100644 index 4360bda21..000000000 --- a/hathor/indexes/memory_address_index.py +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright 2021 Hathor Labs -# -# Licensed under the Apache License, Version 2.0 (the "License"); 
-# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from typing import TYPE_CHECKING, Iterable, Optional - -from structlog import get_logger - -from hathor.indexes.address_index import AddressIndex -from hathor.indexes.memory_tx_group_index import MemoryTxGroupIndex -from hathor.transaction import BaseTransaction - -if TYPE_CHECKING: # pragma: no cover - from hathor.pubsub import PubSubManager - -logger = get_logger() - - -class MemoryAddressIndex(MemoryTxGroupIndex[str], AddressIndex): - """ Index of inputs/outputs by address - """ - - def __init__(self, pubsub: Optional['PubSubManager'] = None) -> None: - super().__init__() - self.pubsub = pubsub - if self.pubsub: - self._subscribe_pubsub_events() - - def get_db_name(self) -> Optional[str]: - return None - - def _extract_keys(self, tx: BaseTransaction) -> Iterable[str]: - return tx.get_related_addresses() - - def add_tx(self, tx: BaseTransaction) -> None: - super().add_tx(tx) - self._publish_tx(tx) - - def get_from_address(self, address: str) -> list[bytes]: - return list(self._get_from_key(address)) - - def get_sorted_from_address(self, address: str, tx_start: Optional[BaseTransaction] = None) -> Iterable[bytes]: - return self._get_sorted_from_key(address, tx_start) - - def is_address_empty(self, address: str) -> bool: - return self._is_key_empty(address) diff --git a/hathor/indexes/memory_height_index.py b/hathor/indexes/memory_height_index.py deleted file mode 100644 index 18a0546ae..000000000 --- a/hathor/indexes/memory_height_index.py +++ /dev/null @@ -1,83 +0,0 @@ -# Copyright 
2021 Hathor Labs -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from typing import Optional - -from hathor.conf.settings import HathorSettings -from hathor.indexes.height_index import HeightIndex, HeightInfo, IndexEntry - - -class MemoryHeightIndex(HeightIndex): - """Store the block hash for each given height - """ - - _index: list[IndexEntry] - - def __init__(self, *, settings: HathorSettings | None = None) -> None: - super().__init__(settings=settings) - self.force_clear() - - def get_db_name(self) -> Optional[str]: - return None - - def force_clear(self) -> None: - self._index = [self.get_genesis_block_entry()] - - def _add(self, height: int, block_hash: bytes, timestamp: int, *, can_reorg: bool) -> None: - if len(self._index) < height: - raise ValueError(f'parent hash required (current height: {len(self._index)}, new height: {height})') - elif len(self._index) == height: - self._index.append(IndexEntry(block_hash, timestamp)) - elif self._index[height].hash != block_hash: - if can_reorg: - del self._index[height:] - self._index.append(IndexEntry(block_hash, timestamp)) - else: - self.log.error( - 'adding would cause a re-org', - height=height, - current_block=self._index[height].hash.hex(), - new_block=block_hash.hex() - ) - raise ValueError('adding would cause a re-org, use can_reorg=True to accept re-orgs') - else: - # nothing to do (there are more blocks, but the block at height currently matches the added block) - pass - - def add_new(self, height: int, 
block_hash: bytes, timestamp: int) -> None: - self._add(height, block_hash, timestamp, can_reorg=False) - - def add_reorg(self, height: int, block_hash: bytes, timestamp: int) -> None: - self._add(height, block_hash, timestamp, can_reorg=True) - - def get(self, height: int) -> Optional[bytes]: - if len(self._index) <= height: - return None - return self._index[height].hash - - def get_tip(self) -> bytes: - return self._index[-1].hash - - def get_height_tip(self) -> HeightInfo: - height = len(self._index) - 1 - return HeightInfo(height, self._index[height].hash) - - def get_n_height_tips(self, n_blocks: int) -> list[HeightInfo]: - if n_blocks < 1: - raise ValueError('n_blocks must be a positive, non-zero, integer') - # highest height that is included, will be the first element - h_high = len(self._index) - 1 - # lowest height that is not included, -1 if it reaches the genesis - h_low = max(h_high - n_blocks, -1) - return [HeightInfo(h, self._index[h].hash) for h in range(h_high, h_low, -1)] diff --git a/hathor/indexes/memory_info_index.py b/hathor/indexes/memory_info_index.py index 656cc7972..d86d93589 100644 --- a/hathor/indexes/memory_info_index.py +++ b/hathor/indexes/memory_info_index.py @@ -14,6 +14,7 @@ from typing import TYPE_CHECKING, Optional +from hathor.conf.settings import HathorSettings from hathor.indexes.info_index import InfoIndex from hathor.transaction import BaseTransaction @@ -22,8 +23,8 @@ class MemoryInfoIndex(InfoIndex): - def __init__(self): - super().__init__() + def __init__(self, *, settings: HathorSettings) -> None: + super().__init__(settings=settings) self._block_count = 0 self._tx_count = 0 self._first_timestamp = 0 diff --git a/hathor/indexes/memory_mempool_tips_index.py b/hathor/indexes/memory_mempool_tips_index.py index 564ad3bf6..3373c59fa 100644 --- a/hathor/indexes/memory_mempool_tips_index.py +++ b/hathor/indexes/memory_mempool_tips_index.py @@ -16,6 +16,7 @@ from structlog import get_logger +from hathor.conf.settings import 
HathorSettings from hathor.indexes.mempool_tips_index import ByteCollectionMempoolTipsIndex logger = get_logger() @@ -24,7 +25,8 @@ class MemoryMempoolTipsIndex(ByteCollectionMempoolTipsIndex): _index: set[bytes] - def __init__(self): + def __init__(self, *, settings: HathorSettings) -> None: + super().__init__(settings=settings) self.log = logger.new() self.force_clear() diff --git a/hathor/indexes/memory_timestamp_index.py b/hathor/indexes/memory_timestamp_index.py deleted file mode 100644 index a6c1c06a0..000000000 --- a/hathor/indexes/memory_timestamp_index.py +++ /dev/null @@ -1,91 +0,0 @@ -# Copyright 2021 Hathor Labs -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from typing import Iterator, Optional - -from sortedcontainers import SortedKeyList -from structlog import get_logger - -from hathor.indexes.timestamp_index import RangeIdx, ScopeType, TimestampIndex -from hathor.indexes.utils import ( - TransactionIndexElement, - get_newer_sorted_key_list, - get_newest_sorted_key_list, - get_older_sorted_key_list, -) -from hathor.transaction import BaseTransaction - -logger = get_logger() - - -class MemoryTimestampIndex(TimestampIndex): - """ Index of transactions sorted by their timestamps. 
- """ - - _index: 'SortedKeyList[TransactionIndexElement]' - - def __init__(self, *, scope_type: ScopeType): - super().__init__(scope_type=scope_type) - self.log = logger.new() - self.force_clear() - - def get_db_name(self) -> Optional[str]: - return None - - def force_clear(self) -> None: - self._index = SortedKeyList(key=lambda x: (x.timestamp, x.hash)) - - def add_tx(self, tx: BaseTransaction) -> bool: - # It is safe to use the in operator because it is O(log(n)). - # http://www.grantjenks.com/docs/sortedcontainers/sortedlist.html#sortedcontainers.SortedList.__contains__ - element = TransactionIndexElement(tx.timestamp, tx.hash) - if element in self._index: - return False - self._index.add(element) - return True - - def del_tx(self, tx: BaseTransaction) -> None: - idx = self._index.bisect_key_left((tx.timestamp, tx.hash)) - if idx < len(self._index) and self._index[idx].hash == tx.hash: - self._index.pop(idx) - - def get_newest(self, count: int) -> tuple[list[bytes], bool]: - return get_newest_sorted_key_list(self._index, count) - - def get_older(self, timestamp: int, hash_bytes: bytes, count: int) -> tuple[list[bytes], bool]: - return get_older_sorted_key_list(self._index, timestamp, hash_bytes, count) - - def get_newer(self, timestamp: int, hash_bytes: bytes, count: int) -> tuple[list[bytes], bool]: - return get_newer_sorted_key_list(self._index, timestamp, hash_bytes, count) - - def get_hashes_and_next_idx(self, from_idx: RangeIdx, count: int) -> tuple[list[bytes], Optional[RangeIdx]]: - timestamp, offset = from_idx - idx = self._index.bisect_key_left((timestamp, b'')) - txs = SortedKeyList(key=lambda x: (x.timestamp, x.hash)) - txs.update(self._index[idx:idx+offset+count]) - ret_txs = txs[offset:offset+count] - hashes = [tx.hash for tx in ret_txs] - if len(ret_txs) < count: - return hashes, None - else: - next_offset = offset + count - next_timestamp = ret_txs[-1].timestamp - if next_timestamp != timestamp: - next_idx = txs.bisect_key_left((next_timestamp, 
b'')) - next_offset -= next_idx - return hashes, RangeIdx(next_timestamp, next_offset) - - def iter(self) -> Iterator[bytes]: - for element in self._index: - yield element.hash diff --git a/hathor/indexes/memory_tips_index.py b/hathor/indexes/memory_tips_index.py index 58c9f447a..b1b419bfa 100644 --- a/hathor/indexes/memory_tips_index.py +++ b/hathor/indexes/memory_tips_index.py @@ -18,6 +18,7 @@ from intervaltree import Interval, IntervalTree from structlog import get_logger +from hathor.conf.settings import HathorSettings from hathor.indexes.tips_index import ScopeType, TipsIndex from hathor.transaction import BaseTransaction @@ -47,8 +48,8 @@ class MemoryTipsIndex(TipsIndex): # It is useful because the interval tree allows access only by the interval. tx_last_interval: dict[bytes, Interval] - def __init__(self, *, scope_type: ScopeType): - super().__init__(scope_type=scope_type) + def __init__(self, *, scope_type: ScopeType, settings: HathorSettings) -> None: + super().__init__(scope_type=scope_type, settings=settings) self.log = logger.new() self.tree = IntervalTree() self.tx_last_interval = {} diff --git a/hathor/indexes/memory_tokens_index.py b/hathor/indexes/memory_tokens_index.py deleted file mode 100644 index 74e5160af..000000000 --- a/hathor/indexes/memory_tokens_index.py +++ /dev/null @@ -1,201 +0,0 @@ -# Copyright 2021 Hathor Labs -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from collections import defaultdict -from typing import Iterator, Optional, cast - -from sortedcontainers import SortedKeyList -from structlog import get_logger - -from hathor.indexes.tokens_index import TokenIndexInfo, TokensIndex, TokenUtxoInfo -from hathor.indexes.utils import ( - TransactionIndexElement, - get_newer_sorted_key_list, - get_newest_sorted_key_list, - get_older_sorted_key_list, -) -from hathor.transaction import BaseTransaction, Transaction -from hathor.transaction.base_transaction import TxVersion -from hathor.util import is_token_uid_valid - -logger = get_logger() - - -class MemoryTokenIndexInfo(TokenIndexInfo): - _name: Optional[str] - _symbol: Optional[str] - _total: int - _mint: set[TokenUtxoInfo] - _melt: set[TokenUtxoInfo] - _transactions: 'SortedKeyList[TransactionIndexElement]' - - def __init__(self, name: Optional[str] = None, symbol: Optional[str] = None, total: int = 0, - mint: Optional[set[TokenUtxoInfo]] = None, melt: Optional[set[TokenUtxoInfo]] = None) -> None: - self._name = name - self._symbol = symbol - self._total = total - self._mint = mint or set() - self._melt = melt or set() - # Saves the (timestamp, hash) of the transactions that include this token - self._transactions = SortedKeyList(key=lambda x: (x.timestamp, x.hash)) - - def get_name(self) -> Optional[str]: - return self._name - - def get_symbol(self) -> Optional[str]: - return self._symbol - - def get_total(self) -> int: - return self._total - - def iter_mint_utxos(self) -> Iterator[TokenUtxoInfo]: - yield from self._mint - - def iter_melt_utxos(self) -> Iterator[TokenUtxoInfo]: - yield from self._melt - - -class MemoryTokensIndex(TokensIndex): - def __init__(self) -> None: - self.log = logger.new() - self.force_clear() - - def get_db_name(self) -> Optional[str]: - return None - - def force_clear(self) -> None: - self._tokens: dict[bytes, MemoryTokenIndexInfo] = defaultdict(MemoryTokenIndexInfo) - - def _add_to_index(self, tx: BaseTransaction, index: int) -> None: - 
""" Add tx to mint/melt indexes and total amount - """ - - tx_output = tx.outputs[index] - token_uid = tx.get_token_uid(tx_output.get_token_index()) - - if tx_output.is_token_authority(): - if tx_output.can_mint_token(): - # add to mint index - self._tokens[token_uid]._mint.add(TokenUtxoInfo(tx.hash, index)) - if tx_output.can_melt_token(): - # add to melt index - self._tokens[token_uid]._melt.add(TokenUtxoInfo(tx.hash, index)) - else: - self._tokens[token_uid]._total += tx_output.value - - def _remove_from_index(self, tx: BaseTransaction, index: int) -> None: - """ Remove tx from mint/melt indexes and total amount - """ - - tx_output = tx.outputs[index] - token_uid = tx.get_token_uid(tx_output.get_token_index()) - - if tx_output.is_token_authority(): - if tx_output.can_mint_token(): - # remove from mint index - self._tokens[token_uid]._mint.discard(TokenUtxoInfo(tx.hash, index)) - if tx_output.can_melt_token(): - # remove from melt index - self._tokens[token_uid]._melt.discard(TokenUtxoInfo(tx.hash, index)) - else: - self._tokens[token_uid]._total -= tx_output.value - - def add_tx(self, tx: BaseTransaction) -> None: - for tx_input in tx.inputs: - spent_tx = tx.get_spent_tx(tx_input) - self._remove_from_index(spent_tx, tx_input.index) - - for index in range(len(tx.outputs)): - self._add_to_index(tx, index) - - # if it's a TokenCreationTransaction, update name and symbol - if tx.version == TxVersion.TOKEN_CREATION_TRANSACTION: - from hathor.transaction.token_creation_tx import TokenCreationTransaction - tx = cast(TokenCreationTransaction, tx) - status = self._tokens[tx.hash] - status._name = tx.token_name - status._symbol = tx.token_symbol - - if tx.is_transaction: - # Adding this tx to the transactions key list - assert isinstance(tx, Transaction) - for token_uid in tx.tokens: - transactions = self._tokens[token_uid]._transactions - # It is safe to use the in operator because it is O(log(n)). 
- # http://www.grantjenks.com/docs/sortedcontainers/sortedlist.html#sortedcontainers.SortedList.__contains__ - element = TransactionIndexElement(tx.timestamp, tx.hash) - if element in transactions: - return - transactions.add(element) - - def remove_tx(self, tx: BaseTransaction) -> None: - for tx_input in tx.inputs: - spent_tx = tx.get_spent_tx(tx_input) - self._add_to_index(spent_tx, tx_input.index) - - for index in range(len(tx.outputs)): - self._remove_from_index(tx, index) - - if tx.is_transaction: - # Removing this tx from the transactions key list - assert isinstance(tx, Transaction) - for token_uid in tx.tokens: - transactions = self._tokens[token_uid]._transactions - idx = transactions.bisect_key_left((tx.timestamp, tx.hash)) - if idx < len(transactions) and transactions[idx].hash == tx.hash: - transactions.pop(idx) - - # if it's a TokenCreationTransaction, remove it from index - if tx.version == TxVersion.TOKEN_CREATION_TRANSACTION: - del self._tokens[tx.hash] - - def iter_all_tokens(self) -> Iterator[tuple[bytes, TokenIndexInfo]]: - yield from self._tokens.items() - - def get_token_info(self, token_uid: bytes) -> TokenIndexInfo: - assert is_token_uid_valid(token_uid) - if token_uid not in self._tokens: - raise KeyError('unknown token') - info = self._tokens[token_uid] - return info - - def get_transactions_count(self, token_uid: bytes) -> int: - assert is_token_uid_valid(token_uid) - if token_uid not in self._tokens: - return 0 - info = self._tokens[token_uid] - return len(info._transactions) - - def get_newest_transactions(self, token_uid: bytes, count: int) -> tuple[list[bytes], bool]: - assert is_token_uid_valid(token_uid) - if token_uid not in self._tokens: - return [], False - transactions = self._tokens[token_uid]._transactions - return get_newest_sorted_key_list(transactions, count) - - def get_older_transactions(self, token_uid: bytes, timestamp: int, hash_bytes: bytes, count: int - ) -> tuple[list[bytes], bool]: - assert 
is_token_uid_valid(token_uid) - if token_uid not in self._tokens: - return [], False - transactions = self._tokens[token_uid]._transactions - return get_older_sorted_key_list(transactions, timestamp, hash_bytes, count) - - def get_newer_transactions(self, token_uid: bytes, timestamp: int, hash_bytes: bytes, count: int - ) -> tuple[list[bytes], bool]: - assert is_token_uid_valid(token_uid) - if token_uid not in self._tokens: - return [], False - transactions = self._tokens[token_uid]._transactions - return get_newer_sorted_key_list(transactions, timestamp, hash_bytes, count) diff --git a/hathor/indexes/memory_tx_group_index.py b/hathor/indexes/memory_tx_group_index.py deleted file mode 100644 index 99a679f21..000000000 --- a/hathor/indexes/memory_tx_group_index.py +++ /dev/null @@ -1,74 +0,0 @@ -# Copyright 2021 Hathor Labs -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from abc import abstractmethod -from collections import defaultdict -from typing import Iterable, Optional, Sized, TypeVar - -from structlog import get_logger - -from hathor.indexes.tx_group_index import TxGroupIndex -from hathor.transaction import BaseTransaction - -logger = get_logger() - -KT = TypeVar('KT', bound=Sized) - - -class MemoryTxGroupIndex(TxGroupIndex[KT]): - """Memory implementation of the TxGroupIndex. This class is abstract and cannot be used directly. 
- """ - - index: defaultdict[KT, set[tuple[int, bytes]]] - - def __init__(self) -> None: - self.force_clear() - - def force_clear(self) -> None: - self.index = defaultdict(set) - - def _add_tx(self, key: KT, tx: BaseTransaction) -> None: - self.index[key].add((tx.timestamp, tx.hash)) - - @abstractmethod - def _extract_keys(self, tx: BaseTransaction) -> Iterable[KT]: - """Extract the keys related to a given tx. The transaction will be added to all extracted keys.""" - raise NotImplementedError - - def add_tx(self, tx: BaseTransaction) -> None: - - for key in self._extract_keys(tx): - self._add_tx(key, tx) - - def remove_tx(self, tx: BaseTransaction) -> None: - - for key in self._extract_keys(tx): - self.index[key].discard((tx.timestamp, tx.hash)) - - def _get_from_key(self, key: KT) -> Iterable[bytes]: - for _, h in self.index[key]: - yield h - - def _get_sorted_from_key(self, key: KT, tx_start: Optional[BaseTransaction] = None) -> Iterable[bytes]: - sorted_elements = sorted(self.index[key]) - found = False - for _, h in sorted_elements: - if tx_start and h == tx_start.hash: - found = True - - if found or not tx_start: - yield h - - def _is_key_empty(self, key: KT) -> bool: - return not bool(self.index[key]) diff --git a/hathor/indexes/memory_utxo_index.py b/hathor/indexes/memory_utxo_index.py deleted file mode 100644 index ff1872800..000000000 --- a/hathor/indexes/memory_utxo_index.py +++ /dev/null @@ -1,134 +0,0 @@ -# Copyright 2021 Hathor Labs -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -from collections import defaultdict -from dataclasses import dataclass, field -from typing import Iterator, NamedTuple, Optional, Union - -from sortedcontainers import SortedSet -from structlog import get_logger - -from hathor.indexes.utxo_index import UtxoIndex, UtxoIndexItem - -logger = get_logger() - - -class _IndexKey(NamedTuple): - token_uid: bytes - address: str - - -class _NoLockItem(NamedTuple): - amount: int - tx_id: bytes - # XXX: using idx instead of index because `def index` exists in parent class - idx: int - - -class _TimeLockItem(NamedTuple): - timelock: int - amount: int - tx_id: bytes - # XXX: using idx instead of index because `def index` exists in parent class - idx: int - - -class _HeightLockItem(NamedTuple): - heightlock: int - amount: int - tx_id: bytes - # XXX: using idx instead of index because `def index` exists in parent class - idx: int - - -@dataclass(frozen=True) -class _IndexItem: - nolock: 'SortedSet[_NoLockItem]' = field(default_factory=SortedSet) - timelock: 'SortedSet[_TimeLockItem]' = field(default_factory=SortedSet) - heightlock: 'SortedSet[_HeightLockItem]' = field(default_factory=SortedSet) - - -class MemoryUtxoIndex(UtxoIndex): - _index: defaultdict[_IndexKey, _IndexItem] - - def __init__(self): - super().__init__() - self._index = defaultdict(_IndexItem) - - def get_db_name(self) -> Optional[str]: - return None - - def force_clear(self) -> None: - self._index.clear() - - def _add_utxo(self, item: UtxoIndexItem) -> None: - self.log.debug('add utxo', item=item) - subindex = self._index[_IndexKey(item.token_uid, item.address)] - if item.timelock is not None: - subindex.timelock.add(_TimeLockItem(item.timelock, item.amount, item.tx_id, item.index)) - elif item.heightlock is not None: - subindex.heightlock.add(_HeightLockItem(item.heightlock, item.amount, item.tx_id, item.index)) - else: - 
subindex.nolock.add(_NoLockItem(item.amount, item.tx_id, item.index)) - - def _remove_utxo(self, item: UtxoIndexItem) -> None: - self.log.debug('del utxo', item=item) - subindex = self._index[_IndexKey(item.token_uid, item.address)] - if item.timelock is not None: - subindex.timelock.discard(_TimeLockItem(item.timelock, item.amount, item.tx_id, item.index)) - elif item.heightlock is not None: - subindex.heightlock.discard(_HeightLockItem(item.heightlock, item.amount, item.tx_id, item.index)) - else: - subindex.nolock.discard(_NoLockItem(item.amount, item.tx_id, item.index)) - - def _iter_utxos_nolock(self, *, token_uid: bytes, address: str, target_amount: int) -> Iterator[UtxoIndexItem]: - subindex = self._index[_IndexKey(token_uid, address)].nolock - # this will point to the next value that is equal or higher than target_amount - idx_next_amount = subindex.bisect((target_amount,)) + 1 - for i in subindex.islice(stop=idx_next_amount, reverse=True): - yield UtxoIndexItem(token_uid, i.tx_id, i.idx, address, i.amount, None, None) - - def _iter_utxos_timelock(self, *, token_uid: bytes, address: str, target_amount: int, - target_timestamp: Optional[int] = None) -> Iterator[UtxoIndexItem]: - import math - seek_timestamp: Union[int, float] - if target_timestamp is None: - seek_timestamp = math.inf - else: - seek_timestamp = target_timestamp - subindex = self._index[_IndexKey(token_uid, address)].timelock - # this will point to the next value that is equal or higher than target_amount - idx_next_amount = subindex.bisect((seek_timestamp, target_amount)) + 1 - for i in subindex.islice(stop=idx_next_amount, reverse=True): - # it might happen that the first one is out of the timestamp range - if i.timelock > seek_timestamp: - continue - yield UtxoIndexItem(token_uid, i.tx_id, i.idx, address, i.amount, i.timelock, None) - - def _iter_utxos_heightlock(self, *, token_uid: bytes, address: str, target_amount: int, - target_height: Optional[int] = None) -> Iterator[UtxoIndexItem]: - 
import math - seek_height: Union[int, float] - if target_height is None: - seek_height = math.inf - else: - seek_height = target_height - subindex = self._index[_IndexKey(token_uid, address)].heightlock - # this will point to the next value that is equal or higher than target_amount - idx_next_amount = subindex.bisect((seek_height, target_amount)) + 1 - for i in subindex.islice(stop=idx_next_amount, reverse=True): - # it might happen that the first one is out of the heightlock range - if i.heightlock > seek_height: - continue - yield UtxoIndexItem(token_uid, i.tx_id, i.idx, address, i.amount, None, i.heightlock) diff --git a/hathor/indexes/nc_creation_index.py b/hathor/indexes/nc_creation_index.py new file mode 100644 index 000000000..d60cd166a --- /dev/null +++ b/hathor/indexes/nc_creation_index.py @@ -0,0 +1,39 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from hathor.indexes.rocksdb_vertex_timestamp_index import RocksDBVertexTimestampIndex +from hathor.indexes.scope import Scope +from hathor.transaction import BaseTransaction, Transaction + +SCOPE = Scope( + include_blocks=False, + include_txs=True, + include_voided=True, +) + + +class NCCreationIndex(RocksDBVertexTimestampIndex): + """Index of Nano Contract creation txs sorted by their timestamps.""" + cf_name = b'nc-creation-index' + db_name = 'nc-creation' + + def get_scope(self) -> Scope: + return SCOPE + + def _should_add(self, tx: BaseTransaction) -> bool: + if not tx.is_nano_contract(): + return False + assert isinstance(tx, Transaction) + nano_header = tx.get_nano_header() + return nano_header.is_creating_a_new_contract() diff --git a/hathor/indexes/nc_history_index.py b/hathor/indexes/nc_history_index.py new file mode 100644 index 000000000..6099ccaa1 --- /dev/null +++ b/hathor/indexes/nc_history_index.py @@ -0,0 +1,90 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from abc import abstractmethod +from typing import Iterable, Optional + +from structlog import get_logger +from typing_extensions import override + +from hathor.indexes.scope import Scope +from hathor.indexes.tx_group_index import TxGroupIndex +from hathor.transaction import BaseTransaction, Transaction + +logger = get_logger() + +SCOPE = Scope( + include_blocks=False, + include_txs=True, + include_voided=True, +) + + +class NCHistoryIndex(TxGroupIndex[bytes]): + """Index of all transactions of a Nano Contract.""" + + def get_scope(self) -> Scope: + return SCOPE + + def init_loop_step(self, tx: BaseTransaction) -> None: + self.add_tx(tx) + + @abstractmethod + def add_tx(self, tx: BaseTransaction) -> None: + """Add tx to this index. + """ + raise NotImplementedError + + @abstractmethod + def remove_tx(self, tx: BaseTransaction) -> None: + """Remove tx from this index. + """ + raise NotImplementedError + + @override + def _extract_keys(self, tx: BaseTransaction) -> Iterable[bytes]: + if not tx.is_nano_contract(): + return + assert isinstance(tx, Transaction) + nano_header = tx.get_nano_header() + yield nano_header.get_contract_id() + + def get_sorted_from_contract_id(self, contract_id: bytes) -> Iterable[bytes]: + """Get a list of tx_ids sorted by timestamp for a given contract_id. + """ + return self._get_sorted_from_key(contract_id) + + def get_newest(self, contract_id: bytes) -> Iterable[bytes]: + """Get a list of tx_ids sorted by timestamp for a given contract_id starting from the newest. + """ + return self._get_sorted_from_key(contract_id, reverse=True) + + def get_older(self, contract_id: bytes, tx_start: Optional[BaseTransaction] = None) -> Iterable[bytes]: + """Get a list of tx_ids sorted by timestamp for a given contract_id that are older than tx_start. 
+ """ + return self._get_sorted_from_key(contract_id, tx_start=tx_start, reverse=True) + + def get_newer(self, contract_id: bytes, tx_start: Optional[BaseTransaction] = None) -> Iterable[bytes]: + """Get a list of tx_ids sorted by timestamp for a given contract_id that are newer than tx_start. + """ + return self._get_sorted_from_key(contract_id, tx_start=tx_start) + + @abstractmethod + def get_transaction_count(self, contract_id: bytes) -> int: + """Get the count of transactions for the given contract_id.""" + raise NotImplementedError + + def get_last_tx_timestamp(self, contract_id: bytes) -> int | None: + """Get the timestamp of the last tx in the given contract_id, or None if it doesn't exist.""" + return self.get_latest_tx_timestamp(contract_id) diff --git a/hathor/indexes/partial_rocksdb_tips_index.py b/hathor/indexes/partial_rocksdb_tips_index.py index 4a0d83c6d..7eca9a7bd 100644 --- a/hathor/indexes/partial_rocksdb_tips_index.py +++ b/hathor/indexes/partial_rocksdb_tips_index.py @@ -18,6 +18,7 @@ from intervaltree import Interval, IntervalTree from structlog import get_logger +from hathor.conf.settings import HathorSettings from hathor.indexes.memory_tips_index import MemoryTipsIndex from hathor.indexes.rocksdb_utils import RocksDBIndexUtils from hathor.indexes.tips_index import ScopeType @@ -61,8 +62,8 @@ class PartialRocksDBTipsIndex(MemoryTipsIndex, RocksDBIndexUtils): # It is useful because the interval tree allows access only by the interval. 
tx_last_interval: dict[bytes, Interval] - def __init__(self, db: 'rocksdb.DB', *, scope_type: ScopeType): - MemoryTipsIndex.__init__(self, scope_type=scope_type) + def __init__(self, db: 'rocksdb.DB', *, scope_type: ScopeType, settings: HathorSettings) -> None: + MemoryTipsIndex.__init__(self, scope_type=scope_type, settings=settings) self._name = scope_type.get_name() self.log = logger.new() # XXX: override MemoryTipsIndex logger so it shows the correct module RocksDBIndexUtils.__init__(self, db, f'tips-{self._name}'.encode()) diff --git a/hathor/indexes/rocksdb_address_index.py b/hathor/indexes/rocksdb_address_index.py index cd7f78096..6288a956c 100644 --- a/hathor/indexes/rocksdb_address_index.py +++ b/hathor/indexes/rocksdb_address_index.py @@ -16,6 +16,7 @@ from structlog import get_logger +from hathor.conf.settings import HathorSettings from hathor.indexes.address_index import AddressIndex from hathor.indexes.rocksdb_tx_group_index import RocksDBTxGroupIndex from hathor.indexes.rocksdb_utils import RocksDBIndexUtils @@ -38,9 +39,10 @@ class RocksDBAddressIndex(RocksDBTxGroupIndex[str], AddressIndex, RocksDBIndexUt _KEY_SIZE = 34 - def __init__(self, db: 'rocksdb.DB', *, cf_name: Optional[bytes] = None, + def __init__(self, db: 'rocksdb.DB', *, settings: HathorSettings, cf_name: Optional[bytes] = None, pubsub: Optional['PubSubManager'] = None) -> None: RocksDBTxGroupIndex.__init__(self, db, cf_name or _CF_NAME_ADDRESS_INDEX) + AddressIndex.__init__(self, settings=settings) self.pubsub = pubsub if self.pubsub: @@ -64,7 +66,7 @@ def add_tx(self, tx: BaseTransaction) -> None: self._publish_tx(tx) def get_from_address(self, address: str) -> list[bytes]: - return list(self._get_from_key(address)) + return list(self._get_sorted_from_key(address)) def get_sorted_from_address(self, address: str, tx_start: Optional[BaseTransaction] = None) -> Iterable[bytes]: return self._get_sorted_from_key(address, tx_start) diff --git 
a/hathor/indexes/rocksdb_blueprint_history_index.py b/hathor/indexes/rocksdb_blueprint_history_index.py new file mode 100644 index 000000000..e833eb12f --- /dev/null +++ b/hathor/indexes/rocksdb_blueprint_history_index.py @@ -0,0 +1,42 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import rocksdb +from typing_extensions import override + +from hathor.indexes.blueprint_history_index import BlueprintHistoryIndex +from hathor.indexes.rocksdb_tx_group_index import RocksDBTxGroupIndex +from hathor.indexes.rocksdb_utils import RocksDBIndexUtils + +_CF_NAME_BLUEPRINT_HISTORY_INDEX = b'blueprint-history-index' +_DB_NAME: str = 'blueprint-history' + + +class RocksDBBlueprintHistoryIndex(RocksDBTxGroupIndex[bytes], BlueprintHistoryIndex, RocksDBIndexUtils): + _KEY_SIZE = 32 + + def __init__(self, db: rocksdb.DB) -> None: + RocksDBTxGroupIndex.__init__(self, db, _CF_NAME_BLUEPRINT_HISTORY_INDEX) + + @override + def _serialize_key(self, key: bytes) -> bytes: + return key + + @override + def _deserialize_key(self, key_bytes: bytes) -> bytes: + return key_bytes + + @override + def get_db_name(self) -> str | None: + return _DB_NAME diff --git a/hathor/indexes/rocksdb_height_index.py b/hathor/indexes/rocksdb_height_index.py index 562bbf43c..eee0470b3 100644 --- a/hathor/indexes/rocksdb_height_index.py +++ b/hathor/indexes/rocksdb_height_index.py @@ -16,6 +16,7 @@ from structlog import get_logger +from hathor.conf.settings import HathorSettings from 
hathor.indexes.height_index import HeightIndex, HeightInfo, IndexEntry from hathor.indexes.rocksdb_utils import RocksDBIndexUtils @@ -42,9 +43,9 @@ class RocksDBHeightIndex(HeightIndex, RocksDBIndexUtils): It works nicely because rocksdb uses a tree sorted by key under the hood. """ - def __init__(self, db: 'rocksdb.DB', *, cf_name: Optional[bytes] = None) -> None: + def __init__(self, db: 'rocksdb.DB', *, settings: HathorSettings, cf_name: Optional[bytes] = None) -> None: self.log = logger.new() - HeightIndex.__init__(self) + HeightIndex.__init__(self, settings=settings) RocksDBIndexUtils.__init__(self, db, cf_name or _CF_NAME_HEIGHT_INDEX) def get_db_name(self) -> Optional[str]: diff --git a/hathor/indexes/rocksdb_info_index.py b/hathor/indexes/rocksdb_info_index.py index 6b6025146..093f13c83 100644 --- a/hathor/indexes/rocksdb_info_index.py +++ b/hathor/indexes/rocksdb_info_index.py @@ -16,6 +16,7 @@ from structlog import get_logger +from hathor.conf.settings import HathorSettings from hathor.indexes.memory_info_index import MemoryInfoIndex from hathor.indexes.rocksdb_utils import RocksDBIndexUtils from hathor.transaction import BaseTransaction @@ -37,10 +38,10 @@ class RocksDBInfoIndex(MemoryInfoIndex, RocksDBIndexUtils): - def __init__(self, db: 'rocksdb.DB', *, cf_name: Optional[bytes] = None) -> None: + def __init__(self, db: 'rocksdb.DB', *, settings: HathorSettings, cf_name: Optional[bytes] = None) -> None: self.log = logger.new() RocksDBIndexUtils.__init__(self, db, cf_name or _CF_NAME_ADDRESS_INDEX) - MemoryInfoIndex.__init__(self) + MemoryInfoIndex.__init__(self, settings=settings) def init_start(self, indexes_manager: 'IndexesManager') -> None: self._load_all_values() diff --git a/hathor/indexes/rocksdb_mempool_tips_index.py b/hathor/indexes/rocksdb_mempool_tips_index.py index a2c6c7ffe..8ff96447c 100644 --- a/hathor/indexes/rocksdb_mempool_tips_index.py +++ b/hathor/indexes/rocksdb_mempool_tips_index.py @@ -16,6 +16,7 @@ from structlog import 
get_logger +from hathor.conf.settings import HathorSettings from hathor.indexes.mempool_tips_index import ByteCollectionMempoolTipsIndex from hathor.indexes.rocksdb_utils import RocksDBSimpleSet @@ -31,7 +32,8 @@ class RocksDBMempoolTipsIndex(ByteCollectionMempoolTipsIndex): _index: RocksDBSimpleSet - def __init__(self, db: 'rocksdb.DB', *, cf_name: Optional[bytes] = None) -> None: + def __init__(self, db: 'rocksdb.DB', *, settings: HathorSettings, cf_name: Optional[bytes] = None) -> None: + super().__init__(settings=settings) self.log = logger.new() _cf_name = cf_name or _CF_NAME_MEMPOOL_TIPS_INDEX self._index = RocksDBSimpleSet(db, self.log, cf_name=_cf_name) diff --git a/hathor/indexes/rocksdb_nc_history_index.py b/hathor/indexes/rocksdb_nc_history_index.py new file mode 100644 index 000000000..eb968a8ea --- /dev/null +++ b/hathor/indexes/rocksdb_nc_history_index.py @@ -0,0 +1,53 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from typing import TYPE_CHECKING, Optional + +from structlog import get_logger + +from hathor.indexes.nc_history_index import NCHistoryIndex +from hathor.indexes.rocksdb_tx_group_index import RocksDBTxGroupIndex +from hathor.indexes.rocksdb_utils import RocksDBIndexUtils + +if TYPE_CHECKING: # pragma: no cover + import rocksdb + +logger = get_logger() + +_CF_NAME_NC_HISTORY_INDEX = b'nc-history-index' +_CF_NAME_NC_HISTORY_INDEX_STATS = b'nc-history-index-stats' +_DB_NAME: str = 'nc-history' + + +class RocksDBNCHistoryIndex(RocksDBTxGroupIndex[bytes], NCHistoryIndex, RocksDBIndexUtils): + """RocksDB-persistent index of all transactions of a Nano Contract.""" + + _KEY_SIZE = 32 + + def __init__(self, db: 'rocksdb.DB', *, cf_name: Optional[bytes] = None) -> None: + RocksDBTxGroupIndex.__init__(self, db, cf_name or _CF_NAME_NC_HISTORY_INDEX, _CF_NAME_NC_HISTORY_INDEX_STATS) + + def _serialize_key(self, key: bytes) -> bytes: + return key + + def _deserialize_key(self, key_bytes: bytes) -> bytes: + return key_bytes + + def get_db_name(self) -> Optional[str]: + # XXX: we don't need it to be parametrizable, so this is fine + return _DB_NAME + + def get_transaction_count(self, contract_id: bytes) -> int: + assert self._stats is not None + return self._stats.get_group_count(contract_id) diff --git a/hathor/indexes/rocksdb_timestamp_index.py b/hathor/indexes/rocksdb_timestamp_index.py index c505820a3..6b0a04625 100644 --- a/hathor/indexes/rocksdb_timestamp_index.py +++ b/hathor/indexes/rocksdb_timestamp_index.py @@ -16,10 +16,11 @@ from structlog import get_logger +from hathor.conf.settings import HathorSettings from hathor.indexes.rocksdb_utils import RocksDBIndexUtils, incr_key -from hathor.indexes.timestamp_index import RangeIdx, ScopeType, TimestampIndex +from hathor.indexes.timestamp_index import ScopeType, TimestampIndex from hathor.transaction import BaseTransaction -from hathor.util import collect_n, skip_n +from hathor.util import collect_n if TYPE_CHECKING: # 
pragma: no cover import rocksdb @@ -38,8 +39,8 @@ class RocksDBTimestampIndex(TimestampIndex, RocksDBIndexUtils): It works nicely because rocksdb uses a tree sorted by key under the hood. """ - def __init__(self, db: 'rocksdb.DB', *, scope_type: ScopeType): - TimestampIndex.__init__(self, scope_type=scope_type) + def __init__(self, db: 'rocksdb.DB', *, settings: HathorSettings, scope_type: ScopeType) -> None: + TimestampIndex.__init__(self, scope_type=scope_type, settings=settings) self._name = scope_type.get_name() self.log = logger.new() RocksDBIndexUtils.__init__(self, db, f'timestamp-sorted-{self._name}'.encode()) @@ -127,38 +128,14 @@ def get_newest(self, count: int) -> tuple[list[bytes], bool]: it = (x for _, x in self._iter(reverse=True)) return collect_n(it, count) - def get_older(self, timestamp: int, hash_bytes: bytes, count: int) -> tuple[list[bytes], bool]: + def get_older(self, timestamp: int, hash_bytes: bytes | None, count: int) -> tuple[list[bytes], bool]: it = (x for _, x in self._iter(timestamp, hash_bytes, reverse=True)) return collect_n(it, count) - def get_newer(self, timestamp: int, hash_bytes: bytes, count: int) -> tuple[list[bytes], bool]: + def get_newer(self, timestamp: int, hash_bytes: bytes | None, count: int) -> tuple[list[bytes], bool]: it = (x for _, x in self._iter(timestamp, hash_bytes)) return collect_n(it, count) - def get_hashes_and_next_idx(self, from_idx: RangeIdx, count: int) -> tuple[list[bytes], Optional[RangeIdx]]: - if count <= 0: - raise ValueError(f'count must be positive, got {count}') - timestamp, offset = from_idx - it = skip_n(self._iter(timestamp), offset) - hashes: list[bytes] = [] - n = count - next_timestamp = timestamp - next_offset = offset - while n > 0: - try: - timestamp, tx_hash = next(it) - except StopIteration: - return hashes, None - hashes.append(tx_hash) - if next_timestamp != timestamp: - # XXX: this is to match how the memory index works, it basically resets to 1, not 0 - next_offset = 1 - 
next_timestamp = timestamp - else: - next_offset += 1 - n -= 1 - return hashes, RangeIdx(next_timestamp, next_offset) - def iter(self) -> Iterator[bytes]: it = self._db.iterkeys(self._cf) it.seek_to_first() diff --git a/hathor/indexes/rocksdb_tokens_index.py b/hathor/indexes/rocksdb_tokens_index.py index 198e26463..e919afaff 100644 --- a/hathor/indexes/rocksdb_tokens_index.py +++ b/hathor/indexes/rocksdb_tokens_index.py @@ -17,8 +17,9 @@ from typing import TYPE_CHECKING, Iterator, NamedTuple, Optional, TypedDict, cast from structlog import get_logger +from typing_extensions import assert_never, override -from hathor.conf.get_settings import get_global_settings +from hathor.conf.settings import HathorSettings from hathor.indexes.rocksdb_utils import ( InternalUid, RocksDBIndexUtils, @@ -27,6 +28,13 @@ to_internal_token_uid, ) from hathor.indexes.tokens_index import TokenIndexInfo, TokensIndex, TokenUtxoInfo +from hathor.nanocontracts.runner.types import UpdateAuthoritiesRecord, UpdateAuthoritiesRecordType +from hathor.nanocontracts.types import ( + NCAcquireAuthorityAction, + NCDepositAction, + NCGrantAuthorityAction, + NCWithdrawalAction, +) from hathor.transaction import BaseTransaction, Transaction from hathor.transaction.base_transaction import TxVersion from hathor.util import collect_n, json_dumpb, json_loadb @@ -60,6 +68,8 @@ class _InfoDict(TypedDict): name: str symbol: str total: int + n_contracts_can_mint: int + n_contracts_can_melt: int class _TxIndex(NamedTuple): @@ -84,9 +94,9 @@ class RocksDBTokensIndex(TokensIndex, RocksDBIndexUtils): It works nicely because rocksdb uses a tree sorted by key under the hood. 
""" - def __init__(self, db: 'rocksdb.DB', *, cf_name: Optional[bytes] = None) -> None: - self._settings = get_global_settings() + def __init__(self, db: 'rocksdb.DB', *, settings: HathorSettings, cf_name: Optional[bytes] = None) -> None: self.log = logger.new() + TokensIndex.__init__(self, settings=settings) RocksDBIndexUtils.__init__(self, db, cf_name or _CF_NAME_TOKENS_INDEX) def get_db_name(self) -> Optional[str]: @@ -166,9 +176,23 @@ def _to_value_info(self, info: _InfoDict) -> bytes: return json_dumpb(info) def _from_value_info(self, value: bytes) -> _InfoDict: - return cast(_InfoDict, json_loadb(value)) - - def _create_token_info(self, token_uid: bytes, name: str, symbol: str, total: int = 0) -> None: + info = json_loadb(value) + if info.get('n_contracts_can_mint') is None: + assert info.get('n_contracts_can_melt') is None + info['n_contracts_can_mint'] = 0 + info['n_contracts_can_melt'] = 0 + + return cast(_InfoDict, info) + + def create_token_info( + self, + token_uid: bytes, + name: str, + symbol: str, + total: int = 0, + n_contracts_can_mint: int = 0, + n_contracts_can_melt: int = 0, + ) -> None: key = self._to_key_info(token_uid) old_value = self._db.get((self._cf, key)) assert old_value is None @@ -176,10 +200,28 @@ def _create_token_info(self, token_uid: bytes, name: str, symbol: str, total: in 'name': name, 'symbol': symbol, 'total': total, + 'n_contracts_can_mint': n_contracts_can_mint, + 'n_contracts_can_melt': n_contracts_can_melt, }) self._db.put((self._cf, key), value) - def _destroy_token(self, token_uid: bytes) -> None: + def create_token_info_from_contract( + self, + token_uid: bytes, + name: str, + symbol: str, + total: int = 0, + ) -> None: + self.create_token_info( + token_uid=token_uid, + name=name, + symbol=symbol, + total=total, + n_contracts_can_mint=1, + n_contracts_can_melt=1, + ) + + def destroy_token(self, token_uid: bytes) -> None: import rocksdb # a writebatch works similar to a "SQL transaction" in that if it fails, either all 
persist or none @@ -218,14 +260,15 @@ def _remove_authority_utxo(self, token_uid: bytes, tx_hash: bytes, index: int, * self._db.delete((self._cf, self._to_key_authority(token_uid, TokenUtxoInfo(tx_hash, index), is_mint=is_mint))) def _create_genesis_info(self) -> None: - self._create_token_info( + self.create_token_info( self._settings.HATHOR_TOKEN_UID, self._settings.HATHOR_TOKEN_NAME, self._settings.HATHOR_TOKEN_SYMBOL, self._settings.GENESIS_TOKENS, ) - def _add_to_total(self, token_uid: bytes, amount: int) -> None: + @override + def add_to_total(self, token_uid: bytes, amount: int) -> None: key_info = self._to_key_info(token_uid) old_value_info = self._db.get((self._cf, key_info)) if token_uid == self._settings.HATHOR_TOKEN_UID and old_value_info is None: @@ -237,18 +280,6 @@ def _add_to_total(self, token_uid: bytes, amount: int) -> None: new_value_info = self._to_value_info(dict_info) self._db.put((self._cf, key_info), new_value_info) - def _subtract_from_total(self, token_uid: bytes, amount: int) -> None: - key_info = self._to_key_info(token_uid) - old_value_info = self._db.get((self._cf, key_info)) - if token_uid == self._settings.HATHOR_TOKEN_UID and old_value_info is None: - self._create_genesis_info() - old_value_info = self._db.get((self._cf, key_info)) - assert old_value_info is not None - dict_info = self._from_value_info(old_value_info) - dict_info['total'] -= amount - new_value_info = self._to_value_info(dict_info) - self._db.put((self._cf, key_info), new_value_info) - def _add_utxo(self, tx: BaseTransaction, index: int) -> None: """ Add tx to mint/melt indexes and total amount """ @@ -263,7 +294,7 @@ def _add_utxo(self, tx: BaseTransaction, index: int) -> None: # add to melt index self._add_authority_utxo(token_uid, tx.hash, index, is_mint=False) else: - self._add_to_total(token_uid, tx_output.value) + self.add_to_total(token_uid, tx_output.value) def _remove_utxo(self, tx: BaseTransaction, index: int) -> None: """ Remove tx from mint/melt indexes 
and total amount @@ -280,7 +311,7 @@ def _remove_utxo(self, tx: BaseTransaction, index: int) -> None: # remove from melt index self._remove_authority_utxo(token_uid, tx.hash, index, is_mint=False) else: - self._subtract_from_total(token_uid, tx_output.value) + self.add_to_total(token_uid, -tx_output.value) def add_tx(self, tx: BaseTransaction) -> None: # if it's a TokenCreationTransaction, update name and symbol @@ -292,7 +323,7 @@ def add_tx(self, tx: BaseTransaction) -> None: key_info = self._to_key_info(tx.hash) token_info = self._db.get((self._cf, key_info)) if token_info is None: - self._create_token_info(tx.hash, tx.token_name, tx.token_symbol) + self.create_token_info(tx.hash, tx.token_name, tx.token_symbol) if tx.is_transaction: # Adding this tx to the transactions key list @@ -308,6 +339,25 @@ def add_tx(self, tx: BaseTransaction) -> None: self.log.debug('add utxo', tx=tx.hash_hex, index=index) self._add_utxo(tx, index) + # Handle actions from Nano Contracts. + if tx.is_nano_contract(): + assert isinstance(tx, Transaction) + nano_header = tx.get_nano_header() + ctx = nano_header.get_context() + for action in ctx.__all_actions__: + match action: + case NCDepositAction(): + self.add_to_total(action.token_uid, action.amount) + case NCWithdrawalAction(): + self.add_to_total(action.token_uid, -action.amount) + case NCGrantAuthorityAction() | NCAcquireAuthorityAction(): + # These actions don't affect the token balance but do affect the counters + # of contracts holding token authorities. They are handled directly by + # the IndexesManager via index update records created by the Runner. 
+ pass + case _: + assert_never(action) + def remove_tx(self, tx: BaseTransaction) -> None: for tx_input in tx.inputs: spent_tx = tx.get_spent_tx(tx_input) @@ -324,7 +374,25 @@ def remove_tx(self, tx: BaseTransaction) -> None: # if it's a TokenCreationTransaction, remove it from index if tx.version == TxVersion.TOKEN_CREATION_TRANSACTION: - self._destroy_token(tx.hash) + self.destroy_token(tx.hash) + + # Handle actions from Nano Contracts. + if tx.is_nano_contract(): + assert isinstance(tx, Transaction) + nano_header = tx.get_nano_header() + ctx = nano_header.get_context() + for action in ctx.__all_actions__: + match action: + case NCDepositAction(): + self.add_to_total(action.token_uid, -action.amount) + case NCWithdrawalAction(): + self.add_to_total(action.token_uid, action.amount) + case NCGrantAuthorityAction() | NCAcquireAuthorityAction(): + # These actions don't affect the nc token balance, + # so no need for any special handling on the index. + pass + case _: + assert_never(action) def iter_all_tokens(self) -> Iterator[tuple[bytes, TokenIndexInfo]]: self.log.debug('seek to start') @@ -349,6 +417,36 @@ def get_token_info(self, token_uid: bytes) -> TokenIndexInfo: info = self._from_value_info(value) return RocksDBTokenIndexInfo(self, token_uid, info) + @override + def update_authorities_from_contract(self, record: UpdateAuthoritiesRecord, undo: bool = False) -> None: + assert record.token_uid != self._settings.HATHOR_TOKEN_UID + key_info = self._to_key_info(record.token_uid) + old_value_info = self._db.get((self._cf, key_info)) + assert old_value_info is not None + dict_info = self._from_value_info(old_value_info) + + increment: int + match record.sub_type: + case UpdateAuthoritiesRecordType.GRANT: + increment = 1 + case UpdateAuthoritiesRecordType.REVOKE: + increment = -1 + case _: + assert_never(record.sub_type) + + if undo: + increment *= -1 + + if record.mint: + dict_info['n_contracts_can_mint'] += increment + if record.melt: + 
dict_info['n_contracts_can_melt'] += increment + + assert dict_info['n_contracts_can_mint'] >= 0 + assert dict_info['n_contracts_can_melt'] >= 0 + new_value_info = self._to_value_info(dict_info) + self._db.put((self._cf, key_info), new_value_info) + def _iter_transactions(self, token_uid: bytes, from_tx: Optional[_TxIndex] = None, *, reverse: bool = False) -> Iterator[bytes]: """ Iterate over all transactions of a token, by default from oldest to newest. @@ -439,3 +537,11 @@ def iter_mint_utxos(self) -> Iterator[TokenUtxoInfo]: def iter_melt_utxos(self) -> Iterator[TokenUtxoInfo]: return self._iter_authority_utxos(is_mint=False) + + @override + def can_mint(self) -> bool: + return any(self.iter_mint_utxos()) or self._info['n_contracts_can_mint'] > 0 + + @override + def can_melt(self) -> bool: + return any(self.iter_melt_utxos()) or self._info['n_contracts_can_melt'] > 0 diff --git a/hathor/indexes/rocksdb_tx_group_index.py b/hathor/indexes/rocksdb_tx_group_index.py index f640fbafa..611e8c75e 100644 --- a/hathor/indexes/rocksdb_tx_group_index.py +++ b/hathor/indexes/rocksdb_tx_group_index.py @@ -13,21 +13,56 @@ # limitations under the License. 
from abc import abstractmethod -from typing import TYPE_CHECKING, Iterable, Optional, Sized, TypeVar +from typing import Callable, Iterator, Optional, Sized, TypeVar +import rocksdb from structlog import get_logger +from typing_extensions import override -from hathor.indexes.rocksdb_utils import RocksDBIndexUtils +from hathor.indexes.rocksdb_utils import RocksDBIndexUtils, incr_key from hathor.indexes.tx_group_index import TxGroupIndex from hathor.transaction import BaseTransaction - -if TYPE_CHECKING: # pragma: no cover - import rocksdb +from hathor.transaction.util import bytes_to_int, int_to_bytes logger = get_logger() KT = TypeVar('KT', bound=Sized) +GROUP_COUNT_VALUE_SIZE = 4 # in bytes + + +class _RocksDBTxGroupStatsIndex(RocksDBIndexUtils): + def __init__( + self, + db: rocksdb.DB, + cf_name: bytes, + serialize_key: Callable[[KT], bytes], + ) -> None: + self.log = logger.new() + super().__init__(db, cf_name) + self._serialize_key = serialize_key + + def increase_group_count(self, key: KT) -> None: + """Increase the group count for the provided key.""" + self._increment_group_count(key, amount=1) + + def decrease_group_count(self, key: KT) -> None: + """Decrease the group count for the provided key.""" + self._increment_group_count(key, amount=-1) + + def _increment_group_count(self, key: KT, *, amount: int) -> None: + """Increment the group count for the provided key with the provided amount.""" + count_key = self._serialize_key(key) + count = self.get_group_count(key) + new_count_bytes = int_to_bytes(number=count + amount, size=GROUP_COUNT_VALUE_SIZE) + self._db.put((self._cf, count_key), new_count_bytes) + + def get_group_count(self, key: KT) -> int: + """Return the group count for the provided key.""" + count_key = self._serialize_key(key) + count_bytes = self._db.get((self._cf, count_key)) or b'' + return bytes_to_int(count_bytes) + class RocksDBTxGroupIndex(TxGroupIndex[KT], RocksDBIndexUtils): """RocksDB implementation of the TxGroupIndex. 
This class is abstract and cannot be used directly. @@ -46,13 +81,15 @@ class RocksDBTxGroupIndex(TxGroupIndex[KT], RocksDBIndexUtils): """ _KEY_SIZE: int - _CF_NAME: bytes - def __init__(self, db: 'rocksdb.DB', cf_name: bytes) -> None: + def __init__(self, db: rocksdb.DB, cf_name: bytes, stats_cf_name: bytes | None = None) -> None: self.log = logger.new() RocksDBIndexUtils.__init__(self, db, cf_name) + self._stats = _RocksDBTxGroupStatsIndex(db, stats_cf_name, self._serialize_key) if stats_cf_name else None def force_clear(self) -> None: + if self._stats: + self._stats.clear() self.clear() @abstractmethod @@ -65,11 +102,6 @@ def _deserialize_key(self, _bytes: bytes) -> KT: """Deserialize RocksDB's key.""" raise NotImplementedError - @abstractmethod - def _extract_keys(self, tx: BaseTransaction) -> Iterable[KT]: - """Extract the keys related to a given tx. The transaction will be added to all extracted keys.""" - raise NotImplementedError - def _to_rocksdb_key(self, key: KT, tx: Optional[BaseTransaction] = None) -> bytes: import struct rocksdb_key = self._serialize_key(key) @@ -94,24 +126,44 @@ def _from_rocksdb_key(self, rocksdb_key: bytes) -> tuple[KT, int, bytes]: def add_tx(self, tx: BaseTransaction) -> None: for key in self._extract_keys(tx): - self.log.debug('put key', key=key) - self._db.put((self._cf, self._to_rocksdb_key(key, tx)), b'') + self.add_single_key(key, tx) + + def add_single_key(self, key: KT, tx: BaseTransaction) -> None: + self.log.debug('put key', key=key) + internal_key = self._to_rocksdb_key(key, tx) + if self._db.get((self._cf, internal_key)) is not None: + return + self._db.put((self._cf, internal_key), b'') + if self._stats: + self._stats.increase_group_count(key) def remove_tx(self, tx: BaseTransaction) -> None: for key in self._extract_keys(tx): - self.log.debug('delete key', key=key) - self._db.delete((self._cf, self._to_rocksdb_key(key, tx))) - - def _get_from_key(self, key: KT) -> Iterable[bytes]: - return 
self._util_get_from_key(key) - - def _get_sorted_from_key(self, key: KT, tx_start: Optional[BaseTransaction] = None) -> Iterable[bytes]: - return self._util_get_from_key(key, tx_start) - - def _util_get_from_key(self, key: KT, tx: Optional[BaseTransaction] = None) -> Iterable[bytes]: + self.remove_single_key(key, tx) + + def remove_single_key(self, key: KT, tx: BaseTransaction) -> None: + self.log.debug('delete key', key=key) + internal_key = self._to_rocksdb_key(key, tx) + if self._db.get((self._cf, internal_key)) is None: + return + self._db.delete((self._cf, internal_key)) + if self._stats: + self._stats.decrease_group_count(key) + + def _get_sorted_from_key( + self, + key: KT, + tx_start: Optional[BaseTransaction] = None, + reverse: bool = False + ) -> Iterator[bytes]: self.log.debug('seek to', key=key) it = self._db.iterkeys(self._cf) - it.seek(self._to_rocksdb_key(key, tx)) + if reverse: + it = reversed(it) + # when reversed we increment the key by 1, which effectively goes to the end of a prefix + it.seek_for_prev(incr_key(self._to_rocksdb_key(key, tx_start))) + else: + it.seek(self._to_rocksdb_key(key, tx_start)) for _cf, rocksdb_key in it: key2, _, tx_hash = self._from_rocksdb_key(rocksdb_key) if key2 != key: @@ -136,3 +188,19 @@ def _is_key_empty(self, key: KT) -> bool: is_empty = key2 != key self.log.debug('seek empty', is_empty=is_empty) return is_empty + + @override + def get_latest_tx_timestamp(self, key: KT) -> int | None: + it = self._db.iterkeys(self._cf) + it = reversed(it) + # when reversed we increment the key by 1, which effectively goes to the end of a prefix + it.seek_for_prev(incr_key(self._to_rocksdb_key(key))) + try: + _cf, rocksdb_key = next(it) + except StopIteration: + return None + key2, tx_timestamp, _ = self._from_rocksdb_key(rocksdb_key) + if key2 != key: + return None + assert key2 == key + return tx_timestamp diff --git a/hathor/indexes/rocksdb_utils.py b/hathor/indexes/rocksdb_utils.py index 431bfc2f6..c6f372136 100644 --- 
a/hathor/indexes/rocksdb_utils.py +++ b/hathor/indexes/rocksdb_utils.py @@ -120,6 +120,16 @@ def _clone_into_dict(self) -> dict[bytes, bytes]: it.seek_to_first() return {k: v for (_, k), v in it} + def get_all_internal(self) -> Iterable[bytes]: + """ + Return all internal content of this index, sorted — that is, its rocksdb keys. + Mostly useful for comparing different index instances in tests. + """ + it = self._db.iterkeys(self._cf) + it.seek_to_first() + for _cf, rocksdb_key in it: + yield rocksdb_key + class RocksDBSimpleSet(Collection[bytes], RocksDBIndexUtils): def __init__(self, db: 'rocksdb.DB', log: 'structlog.stdlib.BoundLogger', *, cf_name: bytes) -> None: diff --git a/hathor/indexes/rocksdb_utxo_index.py b/hathor/indexes/rocksdb_utxo_index.py index 923530ffb..5aa44c93b 100644 --- a/hathor/indexes/rocksdb_utxo_index.py +++ b/hathor/indexes/rocksdb_utxo_index.py @@ -19,6 +19,7 @@ from structlog import get_logger +from hathor.conf.settings import HathorSettings from hathor.crypto.util import decode_address, get_address_b58_from_bytes from hathor.indexes.rocksdb_utils import InternalUid, RocksDBIndexUtils, from_internal_token_uid, to_internal_token_uid from hathor.indexes.utxo_index import UtxoIndex, UtxoIndexItem @@ -311,8 +312,8 @@ class RocksDBUtxoIndex(UtxoIndex, RocksDBIndexUtils): It works nicely because rocksdb uses a tree sorted by key under the hood. 
""" - def __init__(self, db: 'rocksdb.DB', *, cf_name: Optional[bytes] = None) -> None: - super().__init__() + def __init__(self, db: 'rocksdb.DB', *, settings: HathorSettings, cf_name: Optional[bytes] = None) -> None: + super().__init__(settings=settings) self.log = logger.new() RocksDBIndexUtils.__init__(self, db, cf_name or _CF_NAME_UTXO_INDEX) diff --git a/hathor/indexes/rocksdb_vertex_timestamp_index.py b/hathor/indexes/rocksdb_vertex_timestamp_index.py new file mode 100644 index 000000000..6fae6bf9b --- /dev/null +++ b/hathor/indexes/rocksdb_vertex_timestamp_index.py @@ -0,0 +1,131 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import struct +from abc import ABC +from typing import Iterator, final + +import rocksdb +from structlog import get_logger +from typing_extensions import override + +from hathor.indexes.rocksdb_utils import RocksDBIndexUtils, incr_key +from hathor.indexes.vertex_timestamp_index import VertexTimestampIndex +from hathor.transaction import BaseTransaction, Vertex + +logger = get_logger() + + +class RocksDBVertexTimestampIndex(VertexTimestampIndex, RocksDBIndexUtils, ABC): + cf_name: bytes + db_name: str + + """ + This index uses the following key format: + + key = [tx.timestamp][tx.hash] + |--4 bytes---||--32b--| + + It works nicely because rocksdb uses a tree sorted by key under the hood. 
+ """ + + def __init__(self, db: rocksdb.DB) -> None: + self.log = logger.new() + RocksDBIndexUtils.__init__(self, db, self.cf_name) + + @final + @override + def get_db_name(self) -> str | None: + return self.db_name + + @final + @override + def force_clear(self) -> None: + self.clear() + + @staticmethod + @final + def _to_key(vertex: Vertex) -> bytes: + """Make a key for a vertex.""" + key = bytearray() + key.extend(struct.pack('>I', vertex.timestamp)) + assert len(vertex.hash) == 32 + key.extend(vertex.hash) + assert len(key) == 4 + 32 + return bytes(key) + + @staticmethod + @final + def _from_key(key: bytes) -> tuple[int, bytes]: + """Parse a key on the column-family.""" + assert len(key) == 4 + 32 + timestamp: int + (timestamp,) = struct.unpack('>I', key[:4]) + tx_hash = key[4:] + assert len(tx_hash) == 32 + return timestamp, tx_hash + + @final + @override + def _add_tx(self, tx: BaseTransaction) -> None: + key = self._to_key(tx) + self.log.debug('put key', key=key) + self._db.put((self._cf, key), b'') + + @final + @override + def del_tx(self, tx: BaseTransaction) -> None: + key = self._to_key(tx) + self.log.debug('delete key', key=key) + self._db.delete((self._cf, key)) + + @final + @override + def _iter_sorted( + self, + *, + tx_start: BaseTransaction | None, + reverse: bool, + inclusive: bool = False, + ) -> Iterator[bytes]: + it = self._db.iterkeys(self._cf) + if reverse: + it = reversed(it) + if tx_start is None: + self.log.debug('seek to last') + it.seek_to_last() + else: + # when reversed we increment the key by 1, which effectively goes to the end of a prefix + self.log.debug('seek to', tx=tx_start) + it.seek_for_prev(incr_key(self._to_key(tx_start))) + else: + if tx_start is None: + self.log.debug('seek to first') + it.seek_to_first() + else: + self.log.debug('seek to', tx=tx_start) + it.seek(self._to_key(tx_start)) + + it = (self._from_key(key) for _cf, key in it) + try: + _timestamp, first_tx_hash = next(it) + except StopIteration: + return + if 
inclusive or not tx_start or tx_start.hash != first_tx_hash: + yield first_tx_hash + + for _timestamp, tx_hash in it: + self.log.debug('seek found', tx=tx_hash.hex()) + yield tx_hash + self.log.debug('seek end') diff --git a/hathor/indexes/timestamp_index.py b/hathor/indexes/timestamp_index.py index 76d15a1d7..58032bf36 100644 --- a/hathor/indexes/timestamp_index.py +++ b/hathor/indexes/timestamp_index.py @@ -14,10 +14,11 @@ from abc import abstractmethod from enum import Enum -from typing import Iterator, NamedTuple, Optional +from typing import Iterator, NamedTuple from structlog import get_logger +from hathor.conf.settings import HathorSettings from hathor.indexes.base_index import BaseIndex from hathor.indexes.scope import Scope from hathor.transaction import BaseTransaction @@ -55,7 +56,8 @@ class TimestampIndex(BaseIndex): """ Index of transactions sorted by their timestamps. """ - def __init__(self, *, scope_type: ScopeType): + def __init__(self, *, scope_type: ScopeType, settings: HathorSettings) -> None: + super().__init__(settings=settings) self._scope_type = scope_type def get_scope(self) -> Scope: @@ -91,7 +93,7 @@ def get_newest(self, count: int) -> tuple[list[bytes], bool]: raise NotImplementedError @abstractmethod - def get_older(self, timestamp: int, hash_bytes: bytes, count: int) -> tuple[list[bytes], bool]: + def get_older(self, timestamp: int, hash_bytes: bytes | None, count: int) -> tuple[list[bytes], bool]: """ Get transactions or blocks from the timestamp/hash_bytes reference to the oldest :param timestamp: Timestamp reference to start the search @@ -102,7 +104,7 @@ def get_older(self, timestamp: int, hash_bytes: bytes, count: int) -> tuple[list raise NotImplementedError @abstractmethod - def get_newer(self, timestamp: int, hash_bytes: bytes, count: int) -> tuple[list[bytes], bool]: + def get_newer(self, timestamp: int, hash_bytes: bytes | None, count: int) -> tuple[list[bytes], bool]: """ Get transactions or blocks from the 
timestamp/hash_bytes reference to the newest :param timestamp: Timestamp reference to start the search @@ -112,12 +114,6 @@ def get_newer(self, timestamp: int, hash_bytes: bytes, count: int) -> tuple[list """ raise NotImplementedError - @abstractmethod - def get_hashes_and_next_idx(self, from_idx: RangeIdx, count: int) -> tuple[list[bytes], Optional[RangeIdx]]: - """ Get up to count hashes if available and the next range-index, this is used by sync-v1. - """ - raise NotImplementedError - @abstractmethod def iter(self) -> Iterator[bytes]: """ Iterate over the transactions in the index order, that is, sorted by timestamp. diff --git a/hathor/indexes/tips_index.py b/hathor/indexes/tips_index.py index 992745b52..6472d6301 100644 --- a/hathor/indexes/tips_index.py +++ b/hathor/indexes/tips_index.py @@ -18,6 +18,7 @@ from intervaltree import Interval from structlog import get_logger +from hathor.conf.settings import HathorSettings from hathor.indexes.base_index import BaseIndex from hathor.indexes.scope import Scope from hathor.transaction import BaseTransaction @@ -60,7 +61,8 @@ class TipsIndex(BaseIndex): TODO Use an interval tree stored in disk, possibly using a B-tree. """ - def __init__(self, *, scope_type: ScopeType): + def __init__(self, *, scope_type: ScopeType, settings: HathorSettings) -> None: + super().__init__(settings=settings) self._scope_type = scope_type def get_scope(self) -> Scope: diff --git a/hathor/indexes/tokens_index.py b/hathor/indexes/tokens_index.py index 4c958e869..c685fe38d 100644 --- a/hathor/indexes/tokens_index.py +++ b/hathor/indexes/tokens_index.py @@ -12,13 +12,18 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from __future__ import annotations + from abc import ABC, abstractmethod -from typing import Iterator, NamedTuple, Optional +from typing import TYPE_CHECKING, Iterator, NamedTuple, Optional from hathor.indexes.base_index import BaseIndex from hathor.indexes.scope import Scope from hathor.transaction import BaseTransaction +if TYPE_CHECKING: + from hathor.nanocontracts.runner.types import UpdateAuthoritiesRecord + SCOPE = Scope( include_blocks=False, include_txs=True, @@ -64,6 +69,16 @@ def iter_melt_utxos(self) -> Iterator[TokenUtxoInfo]: """Iterate over melt-authority UTXOs""" raise NotImplementedError + @abstractmethod + def can_mint(self) -> bool: + """Return whether this token can be minted, that is, whether any UTXO or contract holds a mint authority.""" + raise NotImplementedError + + @abstractmethod + def can_melt(self) -> bool: + """Return whether this token can be melted, that is, whether any UTXO or contract holds a melt authority.""" + raise NotImplementedError + class TokensIndex(BaseIndex): """ Index of tokens by token uid @@ -114,6 +129,42 @@ def get_token_info(self, token_uid: bytes) -> TokenIndexInfo: """ raise NotImplementedError + @abstractmethod + def create_token_info( + self, + token_uid: bytes, + name: str, + symbol: str, + total: int = 0, + n_contracts_can_mint: int = 0, + n_contracts_can_melt: int = 0, + ) -> None: + """Create a token info for a new token.""" + raise NotImplementedError + + @abstractmethod + def create_token_info_from_contract( + self, + token_uid: bytes, + name: str, + symbol: str, + total: int = 0, + ) -> None: + """Create a token info for a new token created in a contract.""" + raise NotImplementedError + + @abstractmethod + def destroy_token(self, token_uid: bytes) -> None: + """Destroy a token.""" + raise NotImplementedError + + @abstractmethod + def update_authorities_from_contract(self, record: UpdateAuthoritiesRecord, undo: bool = False) -> None: + """ + Handle an UpdateAuthoritiesRecord by incrementing/decrementing 
the counters of contracts holding authorities. + """ + raise NotImplementedError + @abstractmethod def get_transactions_count(self, token_uid: bytes) -> int: """ Get quantity of transactions from requested token @@ -139,3 +190,8 @@ def get_newer_transactions(self, token_uid: bytes, timestamp: int, hash_bytes: b """ Get transactions from the timestamp/hash_bytes reference to the newest """ raise NotImplementedError + + @abstractmethod + def add_to_total(self, token_uid: bytes, amount: int) -> None: + """Add an amount to the total of `token_uid`. The amount may be negative.""" + raise NotImplementedError diff --git a/hathor/indexes/tx_group_index.py b/hathor/indexes/tx_group_index.py index 139245fe9..810cafdf7 100644 --- a/hathor/indexes/tx_group_index.py +++ b/hathor/indexes/tx_group_index.py @@ -13,7 +13,7 @@ # limitations under the License. from abc import abstractmethod -from typing import Generic, Iterable, Optional, Sized, TypeVar +from typing import Generic, Iterable, Iterator, Optional, Sized, TypeVar from structlog import get_logger @@ -44,16 +44,21 @@ def remove_tx(self, tx: BaseTransaction) -> None: raise NotImplementedError @abstractmethod - def _get_from_key(self, key: KT) -> Iterable[bytes]: - """Get all transactions that have a given key.""" + def _extract_keys(self, tx: BaseTransaction) -> Iterable[KT]: + """Extract the keys related to a given tx. The transaction will be added to all extracted keys.""" raise NotImplementedError @abstractmethod - def _get_sorted_from_key(self, key: KT, tx_start: Optional[BaseTransaction] = None) -> Iterable[bytes]: + def _get_sorted_from_key(self, + key: KT, + tx_start: Optional[BaseTransaction] = None, + reverse: bool = False) -> Iterator[bytes]: """Get all transactions that have a given key, sorted by timestamp. `tx_start` serves as a pagination marker, indicating the starting position for the iteration. When tx_start is None, the iteration begins from the initial element. 
+ + `reverse` is used to get the list in the reverse order """ raise NotImplementedError @@ -61,3 +66,18 @@ def _get_sorted_from_key(self, key: KT, tx_start: Optional[BaseTransaction] = No def _is_key_empty(self, key: KT) -> bool: """Check whether a key is empty.""" raise NotImplementedError + + @abstractmethod + def get_latest_tx_timestamp(self, key: KT) -> int | None: + """Get the timestamp of the latest tx in the given key, or None if the key is not found.""" + raise NotImplementedError + + @abstractmethod + def add_single_key(self, key: KT, tx: BaseTransaction) -> None: + """Add a single key to the index.""" + raise NotImplementedError + + @abstractmethod + def remove_single_key(self, key: KT, tx: BaseTransaction) -> None: + """Remove a single key from the index.""" + raise NotImplementedError diff --git a/hathor/indexes/utils.py b/hathor/indexes/utils.py deleted file mode 100644 index 949e59c0e..000000000 --- a/hathor/indexes/utils.py +++ /dev/null @@ -1,76 +0,0 @@ -# Copyright 2021 Hathor Labs -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from typing import NamedTuple - -from sortedcontainers import SortedKeyList -from structlog import get_logger - -logger = get_logger() - - -class TransactionIndexElement(NamedTuple): - timestamp: int - hash: bytes - - -def get_newest_sorted_key_list(key_list: 'SortedKeyList[TransactionIndexElement]', count: int - ) -> tuple[list[bytes], bool]: - """ Get newest data from a sorted key list - Return the elements (quantity is the 'count' parameter) and a boolean indicating if has more - """ - if count < 0: - raise ValueError(f'count must be non-negative, got {count}') - # XXX: count=0 is supported, this if prevents doing key_list[-0:] which would return all transactions - if count == 0: - return [], False - newest = key_list[-count:] - newest.reverse() - if count >= len(key_list): - has_more = False - else: - has_more = True - return [tx_index.hash for tx_index in newest], has_more - - -def get_older_sorted_key_list(key_list: 'SortedKeyList[TransactionIndexElement]', timestamp: int, - hash_bytes: bytes, count: int) -> tuple[list[bytes], bool]: - """ Get sorted key list data from the timestamp/hash_bytes reference to the oldest - Return the elements (quantity is the 'count' parameter) and a boolean indicating if has more - """ - if count < 0: - raise ValueError(f'count must be non-negative, got {count}') - # Get idx of element - idx = key_list.bisect_key_left((timestamp, hash_bytes)) - first_idx = max(0, idx - count) - txs = key_list[first_idx:idx] - # Reverse because we want the newest first - txs.reverse() - return [tx_index.hash for tx_index in txs], first_idx > 0 - - -def get_newer_sorted_key_list(key_list: 'SortedKeyList[TransactionIndexElement]', timestamp: int, - hash_bytes: bytes, count: int) -> tuple[list[bytes], bool]: - """ Get sorted key list data from the timestamp/hash_bytes reference to the newest - Return the elements (quantity is the 'count' parameter) and a boolean indicating if has more - """ - if count < 0: - raise ValueError(f'count must be 
non-negative, got {count}') - # Get idx of element - idx = key_list.bisect_key_left((timestamp, hash_bytes)) - last_idx = min(len(key_list), idx + 1 + count) - txs = key_list[idx + 1:last_idx] - # Reverse because we want the newest first - txs.reverse() - return [tx_index.hash for tx_index in txs], last_idx < len(key_list) diff --git a/hathor/indexes/utxo_index.py b/hathor/indexes/utxo_index.py index bfdc0df78..8b5dcde93 100644 --- a/hathor/indexes/utxo_index.py +++ b/hathor/indexes/utxo_index.py @@ -19,6 +19,7 @@ from structlog import get_logger from hathor.conf.get_settings import get_global_settings +from hathor.conf.settings import HathorSettings from hathor.indexes.base_index import BaseIndex from hathor.indexes.scope import Scope from hathor.transaction import BaseTransaction, Block, TxOutput @@ -107,7 +108,8 @@ class UtxoIndex(BaseIndex): address can be extracted from. """ - def __init__(self): + def __init__(self, *, settings: HathorSettings) -> None: + super().__init__(settings=settings) self.log = logger.new() # interface methods provided by the base class diff --git a/hathor/indexes/vertex_timestamp_index.py b/hathor/indexes/vertex_timestamp_index.py new file mode 100644 index 000000000..0c2c845c0 --- /dev/null +++ b/hathor/indexes/vertex_timestamp_index.py @@ -0,0 +1,96 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from abc import ABC, abstractmethod +from typing import Iterator, final + +from typing_extensions import override + +from hathor.indexes.base_index import BaseIndex +from hathor.transaction import BaseTransaction + + +class VertexTimestampIndex(BaseIndex, ABC): + """This is an abstract index to easily sort a certain type of vertex by its timestamp.""" + # TODO: Update the TimestampIndex to use this abstraction. Maybe the TxGroupIndex could be adapted too. + + @final + @override + def init_loop_step(self, tx: BaseTransaction) -> None: + self.add_tx(tx) + + @abstractmethod + def _should_add(self, tx: BaseTransaction) -> bool: + """Return whether a tx should be added to this index.""" + raise NotImplementedError + + @final + def add_tx(self, tx: BaseTransaction) -> None: + """Add a tx to this index.""" + if self._should_add(tx): + self._add_tx(tx) + + @final + def manually_add_tx(self, tx: BaseTransaction) -> None: + self._add_tx(tx) + + @abstractmethod + def _add_tx(self, tx: BaseTransaction) -> None: + """Internal method to actually add a tx to this index.""" + raise NotImplementedError + + @abstractmethod + def del_tx(self, tx: BaseTransaction) -> None: + """Delete a tx from this index.""" + raise NotImplementedError + + @final + def get_newest(self) -> Iterator[bytes]: + """Get tx ids from newest to oldest.""" + return self._iter_sorted(tx_start=None, reverse=True) + + @final + def get_oldest(self) -> Iterator[bytes]: + """Get tx ids from oldest to newest.""" + return self._iter_sorted(tx_start=None, reverse=False) + + @final + def get_older(self, *, tx_start: BaseTransaction, inclusive: bool = False) -> Iterator[bytes]: + """ + Get tx ids sorted by timestamp that are older than `tx_start`. + The `inclusive` param sets whether `tx_start` should be included. 
+ """ + return self._iter_sorted(tx_start=tx_start, reverse=True, inclusive=inclusive) + + @final + def get_newer(self, *, tx_start: BaseTransaction, inclusive: bool = False) -> Iterator[bytes]: + """ + Get tx ids sorted by timestamp that are newer than `tx_start`. + The `inclusive` param sets whether `tx_start` should be included. + """ + return self._iter_sorted(tx_start=tx_start, reverse=False, inclusive=inclusive) + + @abstractmethod + def _iter_sorted( + self, + *, + tx_start: BaseTransaction | None, + reverse: bool, + inclusive: bool = False, + ) -> Iterator[bytes]: + """ + Internal method to get all txs sorted by timestamp starting from an optional `tx_start`. + The `inclusive` param sets whether `tx_start` should be included. + """ + raise NotImplementedError diff --git a/hathor/manager.py b/hathor/manager.py index 4ac9326f4..1e6d4cdeb 100644 --- a/hathor/manager.py +++ b/hathor/manager.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import datetime import sys import time from cProfile import Profile @@ -45,6 +44,10 @@ from hathor.feature_activation.bit_signaling_service import BitSignalingService from hathor.mining import BlockTemplate, BlockTemplates from hathor.mining.cpu_mining_service import CpuMiningService +from hathor.nanocontracts.exception import NanoContractDoesNotExist +from hathor.nanocontracts.runner import Runner +from hathor.nanocontracts.runner.runner import RunnerFactory +from hathor.nanocontracts.storage import NCBlockStorage, NCContractStorage from hathor.p2p.manager import ConnectionsManager from hathor.p2p.peer import PrivatePeer from hathor.p2p.peer_id import PeerId @@ -53,7 +56,6 @@ from hathor.reward_lock import is_spent_reward_locked from hathor.stratum import StratumFactory from hathor.transaction import BaseTransaction, Block, MergeMinedBlock, Transaction, TxVersion -from hathor.transaction.exceptions import TxValidationError from hathor.transaction.storage.exceptions import TransactionDoesNotExist from hathor.transaction.storage.transaction_storage import TransactionStorage from hathor.transaction.storage.tx_allow_scope import TxAllowScope @@ -109,13 +111,13 @@ def __init__( execution_manager: ExecutionManager, vertex_handler: VertexHandler, vertex_parser: VertexParser, + runner_factory: RunnerFactory, hostname: Optional[str] = None, wallet: Optional[BaseWallet] = None, capabilities: Optional[list[str]] = None, checkpoints: Optional[list[Checkpoint]] = None, rng: Optional[Random] = None, environment_info: Optional[EnvironmentInfo] = None, - full_verification: bool = False, enable_event_queue: bool = False, poa_block_producer: PoaBlockProducer | None = None, # Websocket factory @@ -197,6 +199,7 @@ def __init__( self.connections = p2p_manager self.vertex_handler = vertex_handler self.vertex_parser = vertex_parser + self.runner_factory = runner_factory self.websocket_factory = websocket_factory @@ -223,10 +226,6 @@ def __init__( # Thread pool used to resolve pow when 
sending tokens self.pow_thread_pool = ThreadPool(minthreads=0, maxthreads=settings.MAX_POW_THREADS, name='Pow thread pool') - # Full verification execute all validations for transactions and blocks when initializing the node - # Can be activated on the command line with --full-verification - self._full_verification = full_verification - # List of whitelisted peers self.peers_whitelist: list[PeerId] = [] @@ -272,33 +271,16 @@ def start(self) -> None: ) sys.exit(-1) - # If it's a full verification, we save on the storage that we are starting it - # this is required because if we stop the initilization in the middle, the metadata - # saved on the storage is not reliable anymore, only if we finish it - if self._full_verification: - self.tx_storage.start_full_verification() - else: - # If it's a fast initialization and the last time a full initialization stopped in the middle - # we can't allow the full node to continue, so we need to remove the storage and do a full sync - # or execute an initialization with full verification - if self.tx_storage.is_running_full_verification(): - self.log.error( - 'Error initializing node. The last time you started your node you did a full verification ' - 'that was stopped in the middle. The storage is not reliable anymore and, because of that, ' - 'you must initialize with a full verification again or remove your storage and do a full sync.' - ) - sys.exit(-1) - - # If self.tx_storage.is_running_manager() is True, the last time the node was running it had a sudden crash - # because of that, we must run a full verification because some storage data might be wrong. - # The metadata is the only piece of the storage that may be wrong, not the blocks and transactions. - if self.tx_storage.is_running_manager(): - self.log.error( - 'Error initializing node. The last time you executed your full node it wasn\'t stopped correctly. 
' - 'The storage is not reliable anymore and, because of that, so you must run a full verification ' - 'or remove your storage and do a full sync.' - ) - sys.exit(-1) + # If self.tx_storage.is_running_manager() is True, the last time the node was running it had a sudden crash + # because of that, we must run a sync from scratch or from a snapshot. + # The metadata is the only piece of the storage that may be wrong, not the blocks and transactions. + if self.tx_storage.is_running_manager(): + self.log.error( + 'Error initializing node. The last time you executed your full node it wasn\'t stopped correctly. ' + 'The storage is not reliable anymore and, because of that you must remove your storage and do a' + 'sync from scratch or from a snapshot.' + ) + sys.exit(-1) if self._enable_event_queue: self._event_manager.start(str(self.my_peer.id)) @@ -312,16 +294,7 @@ def start(self) -> None: self.tx_storage.disable_lock() # Open scope for initialization. self.tx_storage.set_allow_scope(TxAllowScope.VALID | TxAllowScope.PARTIAL | TxAllowScope.INVALID) - # Initialize manager's components. - if self._full_verification: - self.tx_storage.reset_indexes() - self._initialize_components_full_verification() - # Before calling self._initialize_components_full_verification() I start 'full verification' mode and - # after that I need to finish it. It's just to know if the full node has stopped a full initialization - # in the middle. - self.tx_storage.finish_full_verification() - else: - self._initialize_components_new() + self._initialize_components() self.tx_storage.set_allow_scope(TxAllowScope.VALID) self.tx_storage.enable_lock() @@ -414,159 +387,37 @@ def stop_profiler(self, save_to: Optional[str] = None) -> None: if save_to: self.profiler.dump_stats(save_to) - def _initialize_components_full_verification(self) -> None: - """You are not supposed to run this method manually. You should run `doStart()` to initialize the - manager. 
- - This method runs through all transactions, verifying them and updating our wallet. - """ - assert not self._enable_event_queue, 'this method cannot be used if the events feature is enabled.' - assert self._full_verification - - self.log.info('initialize') - if self.wallet: - self.wallet._manually_initialize() - t0 = time.time() - t1 = t0 - cnt = 0 - cnt2 = 0 - t2 = t0 - h = 0 - - block_count = 0 - tx_count = 0 - - self.tx_storage.pre_init() - assert self.tx_storage.indexes is not None - - self._verify_soft_voided_txs() - - # Checkpoints as {height: hash} - checkpoint_heights = {} - for cp in self.checkpoints: - checkpoint_heights[cp.height] = cp.hash - - # self.start_profiler() - self.log.debug('reset all metadata') - for tx in self.tx_storage.get_all_transactions(): - tx.reset_metadata() - - self.log.debug('load blocks and transactions') - for tx in self.tx_storage._topological_sort_dfs(): - tx_meta = tx.get_metadata() - - t2 = time.time() - dt = LogDuration(t2 - t1) - dcnt = cnt - cnt2 - tx_rate = '?' 
if dt == 0 else dcnt / dt - h = max(h, (tx.static_metadata.height if isinstance(tx, Block) else 0)) - if dt > 30: - ts_date = datetime.datetime.fromtimestamp(self.tx_storage.latest_timestamp) - if h == 0: - self.log.debug('start loading transactions...') - else: - self.log.info('load transactions...', tx_rate=tx_rate, tx_new=dcnt, dt=dt, - total=cnt, latest_ts=ts_date, height=h) - t1 = t2 - cnt2 = cnt - cnt += 1 - - # It's safe to skip block weight verification during initialization because - # we trust the difficulty stored in metadata - skip_block_weight_verification = True - if block_count % self._settings.VERIFY_WEIGHT_EVERY_N_BLOCKS == 0: - skip_block_weight_verification = False - - try: - # TODO: deal with invalid tx - tx._update_parents_children_metadata() - - if self.tx_storage.can_validate_full(tx): - tx.update_initial_metadata() - if tx.is_genesis: - assert tx.validate_checkpoint(self.checkpoints) - assert self.verification_service.validate_full( - tx, - skip_block_weight_verification=skip_block_weight_verification - ) - self.tx_storage.add_to_indexes(tx) - with self.tx_storage.allow_only_valid_context(): - self.consensus_algorithm.unsafe_update(tx) - self.tx_storage.indexes.update(tx) - if self.tx_storage.indexes.mempool_tips is not None: - self.tx_storage.indexes.mempool_tips.update(tx) # XXX: move to indexes.update - self.tx_storage.save_transaction(tx, only_metadata=True) - else: - assert self.verification_service.validate_basic( - tx, - skip_block_weight_verification=skip_block_weight_verification - ) - self.tx_storage.save_transaction(tx, only_metadata=True) - except (InvalidNewTransaction, TxValidationError): - self.log.error('unexpected error when initializing', tx=tx, exc_info=True) - raise - - if tx.is_block: - block_count += 1 - - # this works because blocks on the best chain are iterated from lower to higher height - assert tx_meta.validation.is_at_least_basic() - assert isinstance(tx, Block) - blk_height = tx.get_height() - if not 
tx_meta.voided_by and tx_meta.validation.is_fully_connected(): - # XXX: this might not be needed when making a full init because the consensus should already have - self.tx_storage.indexes.height.add_reorg(blk_height, tx.hash, tx.timestamp) - - # Check if it's a checkpoint block - if blk_height in checkpoint_heights: - if tx.hash == checkpoint_heights[blk_height]: - del checkpoint_heights[blk_height] - else: - # If the hash is different from checkpoint hash, we stop the node - self.log.error('Error initializing the node. Checkpoint validation error.') - sys.exit() - else: - tx_count += 1 - - if time.time() - t2 > 1: - dt = LogDuration(time.time() - t2) - self.log.warn('tx took too long to load', tx=tx.hash_hex, dt=dt) - - # we have to have a best_block by now - # assert best_block is not None - - self.tx_storage.indexes._manually_initialize(self.tx_storage) - - self.log.debug('done loading transactions') - - # Check if all checkpoints in database are ok - my_best_height = self.tx_storage.get_height_best_block() - if checkpoint_heights: - # If I have checkpoints that were not validated I must check if they are all in a height I still don't have - first = min(list(checkpoint_heights.keys())) - if first <= my_best_height: - # If the height of the first checkpoint not validated is lower than the height of the best block - # Then it's missing this block - self.log.error('Error initializing the node. Checkpoint validation error.') - sys.exit() - - best_height = self.tx_storage.get_height_best_block() - if best_height != h: - self.log.warn('best height doesn\'t match', best_height=best_height, max_height=h) - - # self.stop_profiler(save_to='profiles/initializing.prof') - self.state = self.NodeState.READY - - total_load_time = LogDuration(t2 - t0) - tx_rate = '?' 
if total_load_time == 0 else cnt / total_load_time - - environment_info = self.environment_info.as_dict() if self.environment_info else {} + def get_nc_runner(self, block: Block) -> Runner: + """Return a contract runner for a given block.""" + nc_storage_factory = self.consensus_algorithm.nc_storage_factory + block_storage = nc_storage_factory.get_block_storage_from_block(block) + return self.runner_factory.create(block_storage=block_storage) + + def get_best_block_nc_runner(self) -> Runner: + """Return a contract runner for the best block.""" + best_block = self.tx_storage.get_best_block() + return self.get_nc_runner(best_block) + + def get_nc_block_storage(self, block: Block) -> NCBlockStorage: + """Return the nano block storage for a given block.""" + return self.consensus_algorithm.nc_storage_factory.get_block_storage_from_block(block) + + def get_nc_storage(self, block: Block, nc_id: VertexId) -> NCContractStorage: + """Return a contract storage with the contract state at a given block.""" + from hathor.nanocontracts.types import ContractId, VertexId as NCVertexId + block_storage = self.get_nc_block_storage(block) + try: + contract_storage = block_storage.get_contract_storage(ContractId(NCVertexId(nc_id))) + except KeyError: + raise NanoContractDoesNotExist(nc_id.hex()) + return contract_storage - # Changing the field names in this log could impact log collectors that parse them - self.log.info('ready', vertex_count=cnt, tx_rate=tx_rate, total_load_time=total_load_time, height=h, - blocks=block_count, txs=tx_count, **environment_info) + def get_best_block_nc_storage(self, nc_id: VertexId) -> NCContractStorage: + """Return a contract storage with the contract state at the best block.""" + best_block = self.tx_storage.get_best_block() + return self.get_nc_storage(best_block, nc_id) - def _initialize_components_new(self) -> None: + def _initialize_components(self) -> None: """You are not supposed to run this method manually. 
You should run `doStart()` to initialize the manager. @@ -593,10 +444,6 @@ def _initialize_components_new(self) -> None: started_at=started_at, last_started_at=last_started_at) self._verify_soft_voided_txs() - - # TODO: move support for full-verification here, currently we rely on the original _initialize_components - # method for full-verification to work, if we implement it here we'll reduce a lot of duplicate and - # complex code self.tx_storage.indexes._manually_initialize(self.tx_storage) # Verify if all checkpoints that exist in the database are correct @@ -896,7 +743,7 @@ def get_tokens_issued_per_block(self, height: int) -> int: """Return the number of tokens issued (aka reward) per block of a given height.""" return self.daa.get_tokens_issued_per_block(height) - def submit_block(self, blk: Block, fails_silently: bool = True) -> bool: + def submit_block(self, blk: Block) -> bool: """Used by submit block from all mining APIs. """ tips = self.tx_storage.get_best_block_tips() @@ -913,7 +760,7 @@ def submit_block(self, blk: Block, fails_silently: bool = True) -> bool: ) if blk.weight <= min_insignificant_weight: self.log.warn('submit_block(): insignificant weight? accepted anyway', blk=blk.hash_hex, weight=blk.weight) - return self.propagate_tx(blk, fails_silently=fails_silently) + return self.propagate_tx(blk) def push_tx(self, tx: Transaction, allow_non_standard_script: bool = False, max_output_script_size: int | None = None) -> None: @@ -944,9 +791,9 @@ def push_tx(self, tx: Transaction, allow_non_standard_script: bool = False, if not tx_from_lib.is_standard(max_output_script_size, not allow_non_standard_script): raise NonStandardTxError('Transaction is non standard.') - self.propagate_tx(tx, fails_silently=False) + self.propagate_tx(tx) - def propagate_tx(self, tx: BaseTransaction, fails_silently: bool = True) -> bool: + def propagate_tx(self, tx: BaseTransaction) -> bool: """Push a new transaction to the network. 
It is used by both the wallet and the mining modules. :return: True if the transaction was accepted @@ -957,33 +804,29 @@ def propagate_tx(self, tx: BaseTransaction, fails_silently: bool = True) -> bool else: tx.storage = self.tx_storage - return self.on_new_tx(tx, fails_silently=fails_silently, propagate_to_peers=True) + return self.on_new_tx(tx, propagate_to_peers=True) def on_new_tx( self, - tx: BaseTransaction, + vertex: BaseTransaction, *, quiet: bool = False, - fails_silently: bool = True, propagate_to_peers: bool = True, - reject_locked_reward: bool = True + reject_locked_reward: bool = True, ) -> bool: """ New method for adding transactions or blocks that steps the validation state machine. :param tx: transaction to be added :param quiet: if True will not log when a new tx is accepted - :param fails_silently: if False will raise an exception when tx cannot be added :param propagate_to_peers: if True will relay the tx to other peers if it is accepted """ - success = self.vertex_handler.on_new_vertex( - tx, - quiet=quiet, - fails_silently=fails_silently, - reject_locked_reward=reject_locked_reward, - ) + from hathor.verification.verification_params import VerificationParams + + params = VerificationParams(enable_checkdatasig_count=True, reject_locked_reward=reject_locked_reward) + success = self.vertex_handler._old_on_new_vertex(vertex, params, quiet=quiet) if propagate_to_peers and success: - self.connections.send_tx_to_peers(tx) + self.connections.send_tx_to_peers(vertex) return success diff --git a/hathor/merged_mining/coordinator.py b/hathor/merged_mining/coordinator.py index 88905692a..1873fa95a 100644 --- a/hathor/merged_mining/coordinator.py +++ b/hathor/merged_mining/coordinator.py @@ -233,7 +233,7 @@ def build_aux_pow(self, work: SingleMinerWork) -> BitcoinAuxPow: bitcoin_header, coinbase_tx = self._make_bitcoin_block_and_coinbase(work) header = bytes(bitcoin_header) header_head, header_tail = header[:36], header[-12:] - block_base_hash = 
self.hathor_block.get_base_hash() + block_base_hash = self.hathor_block.get_mining_base_hash() coinbase = bytes(coinbase_tx) assert block_base_hash in coinbase coinbase_head, coinbase_tail = coinbase.split(block_base_hash) @@ -619,7 +619,7 @@ def handle_submit(self, params: list[Any], msgid: Optional[str]) -> None: self.last_submit_at = time.time() bitcoin_block_header = job.build_bitcoin_block_header(work) - block_base_hash = job.hathor_block.get_base_hash() + block_base_hash = job.hathor_block.get_mining_base_hash() block_hash = Hash(bitcoin_block_header.hash) self.log.debug('work received', bitcoin_header=bytes(bitcoin_block_header).hex(), hathor_block=job.hathor_block, block_base_hash=block_base_hash.hex(), @@ -1068,7 +1068,7 @@ def _new_single_miner_job(self, job_id: str, xnonce1: bytes, xnonce2_size: int, hathor_block.update_hash() # build coinbase transaction with hathor block hash - hathor_block_hash = hathor_block.get_base_hash() + hathor_block_hash = hathor_block.get_mining_base_hash() coinbase_tx = self.bitcoin_coord.make_coinbase_transaction( hathor_block_hash, payback_script_bitcoin, diff --git a/hathor/nanocontracts/__init__.py b/hathor/nanocontracts/__init__.py new file mode 100644 index 000000000..f7b85e25a --- /dev/null +++ b/hathor/nanocontracts/__init__.py @@ -0,0 +1,39 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from hathor.nanocontracts.blueprint import Blueprint +from hathor.nanocontracts.context import Context +from hathor.nanocontracts.exception import NCFail +from hathor.nanocontracts.on_chain_blueprint import OnChainBlueprint +from hathor.nanocontracts.runner import Runner +from hathor.nanocontracts.storage import NCMemoryStorageFactory, NCRocksDBStorageFactory, NCStorageFactory +from hathor.nanocontracts.types import fallback, public, view + +# Identifier used in metadata's voided_by when a Nano Contract method fails. +NC_EXECUTION_FAIL_ID: bytes = b'nc-fail' + +__all__ = [ + 'Blueprint', + 'Context', + 'Runner', + 'OnChainBlueprint', + 'NCFail', + 'NCMemoryStorageFactory', + 'NCRocksDBStorageFactory', + 'NCStorageFactory', + 'public', + 'fallback', + 'view', + 'NC_EXECUTION_FAIL_ID', +] diff --git a/hathor/nanocontracts/api_arguments_parser.py b/hathor/nanocontracts/api_arguments_parser.py new file mode 100644 index 000000000..41b595a83 --- /dev/null +++ b/hathor/nanocontracts/api_arguments_parser.py @@ -0,0 +1,46 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import json +from typing import Any + +from hathor.nanocontracts.blueprint import Blueprint +from hathor.nanocontracts.exception import NCMethodNotFound +from hathor.nanocontracts.method import Method + + +def parse_nc_method_call(blueprint_class: type[Blueprint], call_info: str) -> tuple[str, Any]: + """Parse a string that represents an invocation to a Nano Contract method. + + The string must be in the following format: `method(arg1, arg2, arg3)`. + + The arguments must be in JSON format; tuples and namedtuples should be replaced by a list. + + Here are some examples: + - add(1, 2) + - set_result("1x2") + """ + if not call_info.endswith(')'): + raise ValueError + + method_name, _, arguments_raw = call_info[:-1].partition('(') + method_callable = getattr(blueprint_class, method_name, None) + if method_callable is None: + raise NCMethodNotFound(f'{blueprint_class.__name__}.{method_name}') + + args_json = json.loads(f'[{arguments_raw}]') + method = Method.from_callable(method_callable) + parsed_args = method.args.json_to_value(args_json) + + return method_name, parsed_args diff --git a/hathor/nanocontracts/balance_rules.py b/hathor/nanocontracts/balance_rules.py new file mode 100644 index 000000000..0cd0c6c97 --- /dev/null +++ b/hathor/nanocontracts/balance_rules.py @@ -0,0 +1,230 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import annotations + +from abc import ABC, abstractmethod +from typing import Generic, TypeVar + +from typing_extensions import assert_never, override + +from hathor.conf.settings import HATHOR_TOKEN_UID, HathorSettings +from hathor.nanocontracts.exception import NCInvalidAction +from hathor.nanocontracts.storage import NCChangesTracker +from hathor.nanocontracts.types import ( + BaseAction, + NCAcquireAuthorityAction, + NCAction, + NCDepositAction, + NCGrantAuthorityAction, + NCWithdrawalAction, +) +from hathor.transaction.transaction import TokenInfo +from hathor.types import TokenUid + +T = TypeVar('T', bound=BaseAction) + + +class BalanceRules(ABC, Generic[T]): + """ + An abstract base class that unifies balance rules for NCActions. + + Requires definitions for a verification-phase rule and two nano contract execution-phase rules, one for the callee, + which is always a contract, and one for the caller, which may be a transaction or another contract. + """ + + __slots__ = ('settings', 'action') + + def __init__(self, settings: HathorSettings, action: T) -> None: + self.settings = settings + self.action = action + + @abstractmethod + def verification_rule(self, token_dict: dict[TokenUid, TokenInfo]) -> None: + """ + Define how the respective action interacts with the transaction's + token_dict during the verification phase, updating it. + """ + raise NotImplementedError + + @abstractmethod + def nc_callee_execution_rule(self, callee_changes_tracker: NCChangesTracker) -> None: + """ + Define how the respective action interacts with the transaction's changes tracker during nano contract + execution, updating it, on the callee side. 
+ """ + raise NotImplementedError + + @abstractmethod + def nc_caller_execution_rule(self, caller_changes_tracker: NCChangesTracker) -> None: + """ + Define how the respective action interacts with the transaction's changes tracker during nano contract + execution, updating it, on the caller side — that is, when a contract calls another contract. + """ + raise NotImplementedError + + @staticmethod + def get_rules(settings: HathorSettings, action: NCAction) -> BalanceRules: + """Get the balance rules instance for the provided action.""" + match action: + case NCDepositAction(): + return _DepositRules(settings, action) + case NCWithdrawalAction(): + return _WithdrawalRules(settings, action) + case NCGrantAuthorityAction(): + return _GrantAuthorityRules(settings, action) + case NCAcquireAuthorityAction(): + return _AcquireAuthorityRules(settings, action) + case _: + assert_never(action) + + +class _DepositRules(BalanceRules[NCDepositAction]): + """ + Define balance rules for the DEPOSIT action. + + - In the verification-phase, the amount is removed from the tx inputs/outputs balance. + - In the execution-phase (callee), the amount is added to the nano contract balance. + - In the execution-phase (caller), the amount is removed from the nano contract balance. 
+ """ + + @override + def verification_rule(self, token_dict: dict[TokenUid, TokenInfo]) -> None: + token_info = token_dict.get(self.action.token_uid, TokenInfo.get_default()) + token_info.amount = token_info.amount + self.action.amount + token_dict[self.action.token_uid] = token_info + + @override + def nc_callee_execution_rule(self, callee_changes_tracker: NCChangesTracker) -> None: + callee_changes_tracker.add_balance(self.action.token_uid, self.action.amount) + + @override + def nc_caller_execution_rule(self, caller_changes_tracker: NCChangesTracker) -> None: + caller_changes_tracker.add_balance(self.action.token_uid, -self.action.amount) + + +class _WithdrawalRules(BalanceRules[NCWithdrawalAction]): + """ + Define balance rules for the WITHDRAWAL action. + + - In the verification-phase, the amount is added to the tx inputs/outputs balance. + - In the execution-phase (callee), the amount is removed from the nano contract balance. + - In the execution-phase (caller), the amount is added to the nano contract balance. + """ + + @override + def verification_rule(self, token_dict: dict[TokenUid, TokenInfo]) -> None: + token_info = token_dict.get(self.action.token_uid, TokenInfo.get_default()) + token_info.amount = token_info.amount - self.action.amount + token_dict[self.action.token_uid] = token_info + + @override + def nc_callee_execution_rule(self, callee_changes_tracker: NCChangesTracker) -> None: + callee_changes_tracker.add_balance(self.action.token_uid, -self.action.amount) + + @override + def nc_caller_execution_rule(self, caller_changes_tracker: NCChangesTracker) -> None: + caller_changes_tracker.add_balance(self.action.token_uid, self.action.amount) + + +class _GrantAuthorityRules(BalanceRules[NCGrantAuthorityAction]): + """ + Define balance rules for the GRANT_AUTHORITY action. + + - In the verification phase, we check whether the tx inputs can grant the authorities to the nano contract. 
+ - In the execution phase (callee), the authorities are granted to the nano contract. + - In the execution phase (caller), we check whether the balance can grant the authorities to the called contract. + """ + + @override + def verification_rule(self, token_dict: dict[TokenUid, TokenInfo]) -> None: + assert self.action.token_uid != HATHOR_TOKEN_UID + token_info = token_dict.get(self.action.token_uid, TokenInfo.get_default()) + if self.action.mint and not token_info.can_mint: + raise NCInvalidAction( + f'{self.action.name} token {self.action.token_uid.hex()} requires mint, but no input has it' + ) + + if self.action.melt and not token_info.can_melt: + raise NCInvalidAction( + f'{self.action.name} token {self.action.token_uid.hex()} requires melt, but no input has it' + ) + + @override + def nc_callee_execution_rule(self, callee_changes_tracker: NCChangesTracker) -> None: + assert self.action.token_uid != HATHOR_TOKEN_UID + callee_changes_tracker.grant_authorities( + self.action.token_uid, + grant_mint=self.action.mint, + grant_melt=self.action.melt, + ) + + @override + def nc_caller_execution_rule(self, caller_changes_tracker: NCChangesTracker) -> None: + if self.action.token_uid == HATHOR_TOKEN_UID: + raise NCInvalidAction('cannot grant authorities for HTR token') + + balance = caller_changes_tracker.get_balance(self.action.token_uid) + + if self.action.mint and not balance.can_mint: + raise NCInvalidAction( + f'{self.action.name} token {self.action.token_uid.hex()} requires mint, ' + f'but contract does not have that authority' + ) + + if self.action.melt and not balance.can_melt: + raise NCInvalidAction( + f'{self.action.name} token {self.action.token_uid.hex()} requires melt, ' + f'but contract does not have that authority' + ) + + +class _AcquireAuthorityRules(BalanceRules[NCAcquireAuthorityAction]): + """ + Define balance rules for the ACQUIRE_AUTHORITY action. 
+ + - In the verification phase, we allow the respective authorities in the transaction's token_info. + - In the execution phase (callee), we check whether the nano contract balance can grant those authorities. + - In the execution phase (caller), we grant the authorities to the caller. + """ + + @override + def verification_rule(self, token_dict: dict[TokenUid, TokenInfo]) -> None: + assert self.action.token_uid != HATHOR_TOKEN_UID + token_info = token_dict.get(self.action.token_uid, TokenInfo.get_default()) + token_info.can_mint = token_info.can_mint or self.action.mint + token_info.can_melt = token_info.can_melt or self.action.melt + token_dict[self.action.token_uid] = token_info + + @override + def nc_callee_execution_rule(self, callee_changes_tracker: NCChangesTracker) -> None: + assert self.action.token_uid != HATHOR_TOKEN_UID + balance = callee_changes_tracker.get_balance(self.action.token_uid) + + if self.action.mint and not balance.can_mint: + raise NCInvalidAction(f'cannot acquire mint authority for token {self.action.token_uid.hex()}') + + if self.action.melt and not balance.can_melt: + raise NCInvalidAction(f'cannot acquire melt authority for token {self.action.token_uid.hex()}') + + @override + def nc_caller_execution_rule(self, caller_changes_tracker: NCChangesTracker) -> None: + if self.action.token_uid == HATHOR_TOKEN_UID: + raise NCInvalidAction('cannot acquire authorities for HTR token') + + caller_changes_tracker.grant_authorities( + self.action.token_uid, + grant_mint=self.action.mint, + grant_melt=self.action.melt, + ) diff --git a/hathor/nanocontracts/blueprint.py b/hathor/nanocontracts/blueprint.py new file mode 100644 index 000000000..c164562af --- /dev/null +++ b/hathor/nanocontracts/blueprint.py @@ -0,0 +1,144 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, final + +from hathor.nanocontracts.blueprint_env import BlueprintEnvironment +from hathor.nanocontracts.exception import BlueprintSyntaxError +from hathor.nanocontracts.nc_types.utils import pretty_type +from hathor.nanocontracts.types import NC_FALLBACK_METHOD, NC_INITIALIZE_METHOD, NC_METHOD_TYPE_ATTR, NCMethodType + +if TYPE_CHECKING: + from hathor.nanocontracts.nc_exec_logs import NCLogger + +FORBIDDEN_NAMES = { + 'syscall', + 'log', +} + +NC_FIELDS_ATTR: str = '__fields' + + +class _BlueprintBase(type): + """Metaclass for blueprints. + + This metaclass will modify the attributes and set Fields to them according to their types. + """ + + def __new__(cls, name, bases, attrs, **kwargs): + from hathor.nanocontracts.fields import make_field_for_type + + # Initialize only subclasses of Blueprint. + parents = [b for b in bases if isinstance(b, _BlueprintBase)] + if not parents: + return super().__new__(cls, name, bases, attrs, **kwargs) + + cls._validate_initialize_method(attrs) + cls._validate_fallback_method(attrs) + nc_fields = attrs.get('__annotations__', {}) + + # Check for forbidden names. + for field_name in nc_fields: + if field_name in FORBIDDEN_NAMES: + raise BlueprintSyntaxError(f'field name is forbidden: `{field_name}`') + + if field_name.startswith('_'): + raise BlueprintSyntaxError(f'field name cannot start with underscore: `{field_name}`') + + # Create the fields attribute with the type for each field. 
+ attrs[NC_FIELDS_ATTR] = nc_fields + + # Use an empty __slots__ to prevent storing any attributes directly on instances. + # The declared attributes are stored as fields on the class, so they still work despite the empty slots. + attrs['__slots__'] = tuple() + + # Finally, create class! + new_class = super().__new__(cls, name, bases, attrs, **kwargs) + + # Create the Field instance according to each type. + for field_name, field_type in attrs[NC_FIELDS_ATTR].items(): + value = getattr(new_class, field_name, None) + if value is None: + # This is the case when a type is specified but not a value. + # Example: + # name: str + # age: int + try: + field = make_field_for_type(field_name, field_type) + except TypeError: + raise BlueprintSyntaxError( + f'unsupported field type: `{field_name}: {pretty_type(field_type)}`' + ) + setattr(new_class, field_name, field) + else: + # This is the case when a value is specified. + # Example: + # name: str = StrField() + # + # This was not implemented yet and will be extended later. 
+ raise BlueprintSyntaxError(f'fields with default values are currently not supported: `{field_name}`') + + return new_class + + @staticmethod + def _validate_initialize_method(attrs: Any) -> None: + if NC_INITIALIZE_METHOD not in attrs: + raise BlueprintSyntaxError(f'blueprints require a method called `{NC_INITIALIZE_METHOD}`') + + method = attrs[NC_INITIALIZE_METHOD] + method_type = getattr(method, NC_METHOD_TYPE_ATTR, None) + + if method_type is not NCMethodType.PUBLIC: + raise BlueprintSyntaxError(f'`{NC_INITIALIZE_METHOD}` method must be annotated with @public') + + @staticmethod + def _validate_fallback_method(attrs: Any) -> None: + if NC_FALLBACK_METHOD not in attrs: + return + + method = attrs[NC_FALLBACK_METHOD] + method_type = getattr(method, NC_METHOD_TYPE_ATTR, None) + + if method_type is not NCMethodType.FALLBACK: + raise BlueprintSyntaxError(f'`{NC_FALLBACK_METHOD}` method must be annotated with @fallback') + + +class Blueprint(metaclass=_BlueprintBase): + """Base class for all blueprints. + + Example: + + class MyBlueprint(Blueprint): + name: str + age: int + """ + + __slots__ = ('__env',) + + def __init__(self, env: BlueprintEnvironment) -> None: + self.__env = env + + @final + @property + def syscall(self) -> BlueprintEnvironment: + """Return the syscall provider for the current contract.""" + return self.__env + + @final + @property + def log(self) -> NCLogger: + """Return the logger for the current contract.""" + return self.syscall.__log__ diff --git a/hathor/nanocontracts/blueprint_env.py b/hathor/nanocontracts/blueprint_env.py new file mode 100644 index 000000000..7a9f8f187 --- /dev/null +++ b/hathor/nanocontracts/blueprint_env.py @@ -0,0 +1,257 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Optional, Sequence, final + +from hathor.nanocontracts.storage import NCContractStorage +from hathor.nanocontracts.types import Amount, BlueprintId, ContractId, NCAction, TokenUid + +if TYPE_CHECKING: + from hathor.nanocontracts.nc_exec_logs import NCLogger + from hathor.nanocontracts.rng import NanoRNG + from hathor.nanocontracts.runner import Runner + from hathor.nanocontracts.types import NCArgs + + +class BlueprintEnvironment: + """A class that holds all possible interactions a blueprint may have with the system.""" + + __slots__ = ('__runner', '__log__', '__storage__', '__cache__') + + def __init__( + self, + runner: Runner, + nc_logger: NCLogger, + storage: NCContractStorage, + *, + disable_cache: bool = False, + ) -> None: + self.__log__ = nc_logger + self.__runner = runner + self.__storage__ = storage + # XXX: we could replace dict|None with a Cache that can be disabled, cleared, limited, etc + self.__cache__: dict[str, Any] | None = None if disable_cache else {} + + @final + @property + def rng(self) -> NanoRNG: + """Return an RNG for the current contract.""" + return self.__runner.syscall_get_rng() + + @final + def get_contract_id(self) -> ContractId: + """Return the current contract id.""" + return self.__runner.get_current_contract_id() + + @final + def get_blueprint_id(self, contract_id: Optional[ContractId] = None) -> BlueprintId: + """Return the blueprint id of a nano contract. 
By default, it returns for the current contract.""" + if contract_id is None: + contract_id = self.get_contract_id() + return self.__runner.get_blueprint_id(contract_id) + + def get_balance_before_current_call( + self, + token_uid: Optional[TokenUid] = None, + *, + contract_id: Optional[ContractId] = None, + ) -> Amount: + """ + Return the balance for a given token before the current call, that is, + excluding any actions and changes in the current call. + + For instance, if a contract has 50 HTR and the call is requesting to withdraw 3 HTR, + then this method will return 50 HTR.""" + balance = self.__runner.get_balance_before_current_call(contract_id, token_uid) + return Amount(balance.value) + + def get_current_balance( + self, + token_uid: Optional[TokenUid] = None, + *, + contract_id: Optional[ContractId] = None, + ) -> Amount: + """ + Return the current balance for a given token, which includes all actions and changes in the current call. + + For instance, if a contract has 50 HTR and the call is requesting to withdraw 3 HTR, + then this method will return 47 HTR. + """ + balance = self.__runner.get_current_balance(contract_id, token_uid) + return Amount(balance.value) + + @final + def can_mint_before_current_call( + self, + token_uid: TokenUid, + *, + contract_id: Optional[ContractId] = None, + ) -> bool: + """ + Return whether a given token could be minted before the current call, that is, + excluding any actions and changes in the current call. + + For instance, if a contract has a mint authority and a call is revoking it, + then this method will return `True`. + """ + balance = self.__runner.get_balance_before_current_call(contract_id, token_uid) + return balance.can_mint + + @final + def can_mint( + self, + token_uid: TokenUid, + *, + contract_id: Optional[ContractId] = None, + ) -> bool: + """ + Return whether a given token can currently be minted, + which includes all actions and changes in the current call. 
+ + For instance, if a contract has a mint authority and a call is revoking it, + then this method will return `False`. + """ + balance = self.__runner.get_current_balance(contract_id, token_uid) + return balance.can_mint + + @final + def can_melt_before_current_call( + self, + token_uid: TokenUid, + *, + contract_id: Optional[ContractId] = None, + ) -> bool: + """ + Return whether a given token could be melted before the current call, that is, + excluding any actions and changes in the current call. + + For instance, if a contract has a melt authority and a call is revoking it, + then this method will return `True`. + """ + balance = self.__runner.get_balance_before_current_call(contract_id, token_uid) + return balance.can_melt + + @final + def can_melt( + self, + token_uid: TokenUid, + *, + contract_id: Optional[ContractId] = None, + ) -> bool: + """ + Return whether a given token can currently be melted, + which includes all actions and changes in the current call. + + For instance, if a contract has a melt authority and a transaction is revoking it, + then this method will return `False`. 
+ """ + balance = self.__runner.get_current_balance(contract_id, token_uid) + return balance.can_melt + + @final + def call_public_method( + self, + nc_id: ContractId, + method_name: str, + actions: Sequence[NCAction], + *args: Any, + **kwargs: Any, + ) -> Any: + """Call a public method of another contract.""" + return self.__runner.syscall_call_another_contract_public_method(nc_id, method_name, actions, args, kwargs) + + @final + def proxy_call_public_method( + self, + blueprint_id: BlueprintId, + method_name: str, + actions: Sequence[NCAction], + *args: Any, + **kwargs: Any, + ) -> Any: + """Execute a proxy call to a public method of another blueprint.""" + return self.__runner.syscall_proxy_call_public_method(blueprint_id, method_name, actions, args, kwargs) + + @final + def proxy_call_public_method_nc_args( + self, + blueprint_id: BlueprintId, + method_name: str, + actions: Sequence[NCAction], + nc_args: NCArgs, + ) -> Any: + """Execute a proxy call to a public method of another blueprint.""" + return self.__runner.syscall_proxy_call_public_method_nc_args(blueprint_id, method_name, actions, nc_args) + + @final + def call_view_method(self, nc_id: ContractId, method_name: str, *args: Any, **kwargs: Any) -> Any: + """Call a view method of another contract.""" + return self.__runner.syscall_call_another_contract_view_method(nc_id, method_name, args, kwargs) + + @final + def revoke_authorities(self, token_uid: TokenUid, *, revoke_mint: bool, revoke_melt: bool) -> None: + """Revoke authorities from this nano contract.""" + self.__runner.syscall_revoke_authorities(token_uid=token_uid, revoke_mint=revoke_mint, revoke_melt=revoke_melt) + + @final + def mint_tokens(self, token_uid: TokenUid, amount: int) -> None: + """Mint tokens and add them to the balance of this nano contract.""" + self.__runner.syscall_mint_tokens(token_uid=token_uid, amount=amount) + + @final + def melt_tokens(self, token_uid: TokenUid, amount: int) -> None: + """Melt tokens by removing them from 
the balance of this nano contract.""" + self.__runner.syscall_melt_tokens(token_uid=token_uid, amount=amount) + + @final + def create_contract( + self, + blueprint_id: BlueprintId, + salt: bytes, + actions: Sequence[NCAction], + *args: Any, + **kwargs: Any, + ) -> tuple[ContractId, Any]: + """Create a new contract.""" + return self.__runner.syscall_create_another_contract(blueprint_id, salt, actions, args, kwargs) + + @final + def emit_event(self, data: bytes) -> None: + """Emit a custom event from a Nano Contract.""" + self.__runner.syscall_emit_event(data) + + @final + def create_token( + self, + token_name: str, + token_symbol: str, + amount: int, + mint_authority: bool = True, + melt_authority: bool = True, + ) -> TokenUid: + """Create a new token.""" + return self.__runner.syscall_create_child_token( + token_name, + token_symbol, + amount, + mint_authority, + melt_authority, + ) + + @final + def change_blueprint(self, blueprint_id: BlueprintId) -> None: + """Change the blueprint of this contract.""" + self.__runner.syscall_change_blueprint(blueprint_id) diff --git a/hathor/nanocontracts/blueprint_syntax_validation.py b/hathor/nanocontracts/blueprint_syntax_validation.py new file mode 100644 index 000000000..eb1307c41 --- /dev/null +++ b/hathor/nanocontracts/blueprint_syntax_validation.py @@ -0,0 +1,99 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import annotations + +import inspect +from typing import Callable + +from hathor.nanocontracts.exception import BlueprintSyntaxError + + +def validate_has_self_arg(fn: Callable, annotation_name: str) -> None: + """Validate the `self` arg of a callable.""" + arg_spec = inspect.getfullargspec(fn) + if len(arg_spec.args) == 0: + raise BlueprintSyntaxError(f'@{annotation_name} method must have `self` argument: `{fn.__name__}()`') + + if arg_spec.args[0] != 'self': + raise BlueprintSyntaxError( + f'@{annotation_name} method first argument must be called `self`: `{fn.__name__}()`' + ) + + if 'self' in arg_spec.annotations.keys(): + raise BlueprintSyntaxError(f'@{annotation_name} method `self` argument must not be typed: `{fn.__name__}()`') + + +def validate_method_types(fn: Callable) -> None: + """Validate the arg and return types of a callable.""" + special_args = ['self'] + arg_spec = inspect.getfullargspec(fn) + + if 'return' not in arg_spec.annotations: + raise BlueprintSyntaxError(f'missing return type on method `{fn.__name__}`') + + # TODO: This currently fails for types such as unions, probably because this is the wrong + # parsing function to use. Fix this. + # from hathor.nanocontracts.fields import get_field_class_for_attr + # return_type = arg_spec.annotations['return'] + # if return_type is not None: + # try: + # get_field_class_for_attr(return_type) + # except UnknownFieldType: + # raise BlueprintSyntaxError( + # f'unsupported return type `{return_type}` on method `{fn.__name__}`' + # ) + + for arg_name in arg_spec.args: + if arg_name in special_args: + continue + + if arg_name not in arg_spec.annotations: + raise BlueprintSyntaxError(f'argument `{arg_name}` on method `{fn.__name__}` must be typed') + + # TODO: This currently fails for @view methods with NamedTuple as args for example, + # because API calls use a different parsing function. Fix this. 
+ # arg_type = arg_spec.annotations[arg_name] + # try: + # get_field_class_for_attr(arg_type) + # except UnknownFieldType: + # raise BlueprintSyntaxError( + # f'unsupported type `{arg_type.__name__}` on argument `{arg_name}` of method `{fn.__name__}`' + # ) + + +def validate_has_ctx_arg(fn: Callable, annotation_name: str) -> None: + """Validate the context arg of a callable.""" + arg_spec = inspect.getfullargspec(fn) + + if len(arg_spec.args) < 2: + raise BlueprintSyntaxError( + f'@{annotation_name} method must have `Context` argument: `{fn.__name__}()`' + ) + + from hathor.nanocontracts import Context + second_arg = arg_spec.args[1] + if arg_spec.annotations[second_arg] is not Context: + raise BlueprintSyntaxError( + f'@{annotation_name} method second arg `{second_arg}` argument must be of type `Context`: ' + f'`{fn.__name__}()`' + ) + + +def validate_has_not_ctx_arg(fn: Callable, annotation_name: str) -> None: + """Validate that a callable doesn't have a `Context` arg.""" + from hathor.nanocontracts import Context + arg_spec = inspect.getfullargspec(fn) + if Context in arg_spec.annotations.values(): + raise BlueprintSyntaxError(f'@{annotation_name} method cannot have arg with type `Context`: `{fn.__name__}()`') diff --git a/hathor/nanocontracts/blueprints/__init__.py b/hathor/nanocontracts/blueprints/__init__.py new file mode 100644 index 000000000..bf68061ff --- /dev/null +++ b/hathor/nanocontracts/blueprints/__init__.py @@ -0,0 +1,21 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import TYPE_CHECKING, Type + +if TYPE_CHECKING: + from hathor.nanocontracts.blueprint import Blueprint + + +_blueprints_mapper: dict[str, Type['Blueprint']] = {} diff --git a/hathor/nanocontracts/catalog.py b/hathor/nanocontracts/catalog.py new file mode 100644 index 000000000..e142a541f --- /dev/null +++ b/hathor/nanocontracts/catalog.py @@ -0,0 +1,42 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from typing import TYPE_CHECKING, Type + +from hathor.nanocontracts.blueprints import _blueprints_mapper +from hathor.nanocontracts.types import BlueprintId + +if TYPE_CHECKING: + from hathor.conf.settings import HathorSettings + from hathor.nanocontracts.blueprint import Blueprint + + +class NCBlueprintCatalog: + """Catalog of blueprints available.""" + + def __init__(self, blueprints: dict[bytes, Type['Blueprint']]) -> None: + self.blueprints = blueprints + + def get_blueprint_class(self, blueprint_id: BlueprintId) -> Type['Blueprint'] | None: + """Return the blueprint class related to the given blueprint id or None if it doesn't exist.""" + return self.blueprints.get(blueprint_id, None) + + +def generate_catalog_from_settings(settings: 'HathorSettings') -> NCBlueprintCatalog: + """Generate a catalog of blueprints based on the provided settings.""" + assert settings.ENABLE_NANO_CONTRACTS + blueprints: dict[bytes, Type['Blueprint']] = {} + for _id, _name in settings.BLUEPRINTS.items(): + blueprints[_id] = _blueprints_mapper[_name] + return NCBlueprintCatalog(blueprints) diff --git a/hathor/nanocontracts/context.py b/hathor/nanocontracts/context.py new file mode 100644 index 000000000..2002472fa --- /dev/null +++ b/hathor/nanocontracts/context.py @@ -0,0 +1,129 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import annotations + +from collections import defaultdict +from itertools import chain +from types import MappingProxyType +from typing import TYPE_CHECKING, Any, Sequence, final + +from hathor.crypto.util import get_address_b58_from_bytes +from hathor.nanocontracts.exception import NCFail, NCInvalidContext +from hathor.nanocontracts.types import Address, ContractId, NCAction, TokenUid +from hathor.nanocontracts.vertex_data import VertexData +from hathor.transaction.exceptions import TxValidationError + +if TYPE_CHECKING: + from hathor.transaction import BaseTransaction + +_EMPTY_MAP: MappingProxyType[TokenUid, tuple[NCAction, ...]] = MappingProxyType({}) + + +@final +class Context: + """Context passed to a method call. An empty list of actions means the + method is being called with no deposits and withdrawals. + + Deposits and withdrawals are grouped by token. Note that it is impossible + to have both a deposit and a withdrawal for the same token. + """ + __slots__ = ('__actions', '__address', '__vertex', '__timestamp', '__all_actions__') + __actions: MappingProxyType[TokenUid, tuple[NCAction, ...]] + __address: Address | ContractId + __vertex: VertexData + __timestamp: int + + def __init__( + self, + actions: Sequence[NCAction], + vertex: BaseTransaction | VertexData, + address: Address | ContractId, + timestamp: int, + ) -> None: + # Dict of action where the key is the token_uid. + # If empty, it is a method call without any actions. 
+ if not actions: + self.__actions = _EMPTY_MAP + else: + from hathor.verification.nano_header_verifier import NanoHeaderVerifier + try: + NanoHeaderVerifier.verify_action_list(actions) + except TxValidationError as e: + raise NCInvalidContext('invalid nano context') from e + + actions_map: defaultdict[TokenUid, tuple[NCAction, ...]] = defaultdict(tuple) + for action in actions: + actions_map[action.token_uid] = (*actions_map[action.token_uid], action) + self.__actions = MappingProxyType(actions_map) + + self.__all_actions__: tuple[NCAction, ...] = tuple(chain(*self.__actions.values())) + + # Vertex calling the method. + if isinstance(vertex, VertexData): + self.__vertex = vertex + else: + self.__vertex = VertexData.create_from_vertex(vertex) + + # Address calling the method. + self.__address = address + + # Timestamp of the first block confirming tx. + self.__timestamp = timestamp + + @property + def vertex(self) -> VertexData: + return self.__vertex + + @property + def address(self) -> Address | ContractId: + return self.__address + + @property + def timestamp(self) -> int: + return self.__timestamp + + @property + def actions(self) -> MappingProxyType[TokenUid, tuple[NCAction, ...]]: + """Get a mapping of actions per token.""" + return self.__actions + + @property + def actions_list(self) -> list[NCAction]: + """Get a list of all actions.""" + return list(self.__all_actions__) + + def get_single_action(self, token_uid: TokenUid) -> NCAction: + """Get exactly one action for the provided token, and fail otherwise.""" + actions = self.actions.get(token_uid) + if actions is None or len(actions) != 1: + raise NCFail(f'expected exactly 1 action for token {token_uid.hex()}') + return actions[0] + + def copy(self) -> Context: + """Return a copy of the context.""" + return Context( + actions=list(self.__all_actions__), + vertex=self.vertex, + address=self.address, + timestamp=self.timestamp, + ) + + def to_json(self) -> dict[str, Any]: + """Return a JSON representation 
of the context.""" + return { + 'actions': [action.to_json() for action in self.__all_actions__], + 'address': get_address_b58_from_bytes(self.address), + 'timestamp': self.timestamp, + } diff --git a/hathor/nanocontracts/custom_builtins.py b/hathor/nanocontracts/custom_builtins.py new file mode 100644 index 000000000..37b50aba1 --- /dev/null +++ b/hathor/nanocontracts/custom_builtins.py @@ -0,0 +1,797 @@ +# Copyright 2024 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import builtins +import types +from functools import partial +from operator import index +from typing import ( + Any, + Callable, + Iterable, + Iterator, + Mapping, + NoReturn, + Protocol, + Sequence, + SupportsIndex, + TypeVar, + final, +) + +from typing_extensions import Self, TypeVarTuple + +from hathor.nanocontracts.exception import NCDisabledBuiltinError +from hathor.nanocontracts.on_chain_blueprint import ALLOWED_IMPORTS, BLUEPRINT_CLASS_NAME + +T = TypeVar('T') +Ts = TypeVarTuple('Ts') + +WRAPPER_ASSIGNMENTS = ( + '__module__', + '__name__', + '__qualname__', + '__doc__', + '__annotations__', + '__type_params__', +) + +WRAPPER_UPDATES = ('__dict__',) + + +def _update_wrapper( + wrapper: T, + wrapped: T, + assigned: tuple[str, ...] = WRAPPER_ASSIGNMENTS, + updated: tuple[str, ...] 
= WRAPPER_UPDATES, +) -> T: + """ Behaves like functools.update_wrapper but with the important difference of not creating wrapper.__wrapped__ + """ + for attr in assigned: + try: + value = getattr(wrapped, attr) + except AttributeError: + pass + else: + setattr(wrapper, attr, value) + for attr in updated: + value = getattr(wrapper, attr) + assert isinstance(value, dict), 'expected dict on updated attrs' + value.update(getattr(wrapped, attr, {})) + # Return the wrapper so this can be used as a decorator via partial() + return wrapper + + +def _wraps( + wrapped: T, + assigned: tuple[str, ...] = WRAPPER_ASSIGNMENTS, + updated: tuple[str, ...] = WRAPPER_UPDATES, +) -> Callable[[T], T]: + """ Like functools.wraps but with our _update_wrapper + """ + return partial(_update_wrapper, wrapped=wrapped, assigned=assigned, updated=updated) + + +@_wraps(builtins.range, updated=tuple()) # type: ignore[arg-type] +@final +class custom_range: + """ Re-implementation of builtins.range in pure Python, so it will execute purely in Python's VM. 
+ + XXX: @_wraps will replace this docstring's with the original docstring + """ + + __slots__ = ('_start', '_stop', '_step') + + @property + def start(self) -> int: + return self._start + + @property + def stop(self) -> int: + return self._stop + + @property + def step(self) -> int: + return self._step + + def __init__(self, *args: SupportsIndex) -> None: + match args: + case [stop]: + self._start = 0 + self._stop = index(stop) + self._step = 1 + case [start, stop]: + self._start = index(start) + self._stop = index(stop) + self._step = 1 + case [start, stop, step]: + self._start = index(start) + self._stop = index(stop) + self._step = index(step) + if self._step == 0: + raise ValueError('range() arg 3 must not be zero') + case _: + raise TypeError(f'range expected at most 3 arguments, got {len(args)}') + + def __repr__(self): + match (self._start, self._step): + case (0, 1): + return f'range({self._stop})' + case (_, 1): + return f'range({self._start}, {self._stop})' + case _: + return f'range({self._start}, {self._stop}, {self._step})' + + def count(self, value: int) -> int: + """rangeobject.count(value) -> integer -- return number of occurrences of value""" + return 1 if value in self else 0 + + def index(self, value: int) -> int: + """rangeobject.index(value) -> integer -- return index of value. + Raise ValueError if the value is not present. 
+ """ + if value not in self: + raise ValueError(f'{value} is not in range') + return (value - self._start) // self._step + + def __len__(self) -> int: + if (self._step > 0 and self._start >= self._stop) or (self._step < 0 and self._start <= self._stop): + return 0 + if self._step > 0: + return (self._stop - self._start + self._step - 1) // self._step + else: + return (self._start - self._stop - self._step - 1) // -self._step + + def __bool__(self) -> bool: + return len(self) > 0 + + def __eq__(self, other: object) -> bool: + if not isinstance(other, type(self)): + return False + return self.start == other.start and self.stop == other.stop and self.step == other.step + + def __hash__(self) -> int: + return hash((self._start, self._stop, self._step)) + + def __contains__(self, value: object) -> bool: + if not isinstance(value, SupportsIndex): + return False + val = index(value) + if self._step > 0: + return self._start <= val < self._stop and (val - self._start) % self._step == 0 + else: + return self._start >= val > self._stop and (val - self._start) % self._step == 0 + + def __iter__(self) -> Iterator[int]: + current = self._start + while (self._step > 0 and current < self._stop) or (self._step < 0 and current > self._stop): + yield current + current += self._step + + def _getitem_int(self, key: SupportsIndex) -> int: + i = index(key) + if i < 0: + i += len(self) + if i < 0 or i >= len(self): + raise IndexError('range index out of range') + return self._start + i * self._step + + def _getitem_slice(self, key: slice) -> Self: + start, stop, step = key.indices(len(self)) + return type(self)(self._start + start * self._step, self._start + stop * self._step, self._step * step) + + def __getitem__(self, key: SupportsIndex | slice) -> int | Self: + if isinstance(key, slice): + return self._getitem_slice(key) + elif isinstance(key, SupportsIndex): + return self._getitem_int(key) + else: + raise TypeError(f'range indices must be integers or slices, not 
{type(key).__name__}') + + def __reversed__(self) -> Iterator[int]: + current = self._start + (len(self) - 1) * self._step + for _ in type(self)(len(self)): + yield current + current -= self._step + + +class ImportFunction(Protocol): + def __call__( + self, + name: str, + globals: Mapping[str, object] | None = None, + locals: Mapping[str, object] | None = None, + fromlist: Sequence[str] = (), + level: int = 0, + ) -> types.ModuleType: + ... + + +def _generate_restricted_import_function(allowed_imports: dict[str, set[str]]) -> ImportFunction: + """Returns a function equivalent to builtins.__import__ but that will only import `allowed_imports`""" + @_wraps(builtins.__import__) + def __import__( + name: str, + globals: Mapping[str, object] | None = None, + locals: Mapping[str, object] | None = None, + fromlist: Sequence[str] = (), + level: int = 0, + ) -> types.ModuleType: + if level != 0: + raise ImportError('Relative imports are not allowed') + if not fromlist and name != 'typing': + # XXX: typing is allowed here because Foo[T] triggers a __import__('typing', fromlist=None) for some reason + raise ImportError('Only `from ... 
import ...` imports are allowed') + if name not in allowed_imports: + raise ImportError(f'Import from "{name}" is not allowed.') + allowed_fromlist = allowed_imports[name] + for import_what in fromlist: + if import_what not in allowed_fromlist: + raise ImportError(f'Import from "{name}.{import_what}" is not allowed.') + return builtins.__import__(name=name, globals=globals, fromlist=fromlist, level=0) + return __import__ + + +def _generate_disabled_builtin_func(name: str) -> Callable[..., NoReturn]: + """Generate a function analogous to `func` but that will always raise an exception when called.""" + func = getattr(builtins, name, None) + msg = f'The use of `{name}` has been disabled' + + def __disabled__(*_args: Any, **_kwargs: Any) -> NoReturn: + raise NCDisabledBuiltinError(msg) + + assert func is not None + return _wraps(func)(__disabled__) + + +@_wraps(builtins.all) +def custom_all(iterable: Iterable[object]) -> bool: + """ Re-implementation of builtins.all in pure Python, so it will execute purely in Python's VM. + + XXX: @_wraps will replace this docstring's with the original docstring + """ + for i in iterable: + if not i: + return False + return True + + +@_wraps(builtins.any) +def custom_any(iterable: Iterable[object]) -> bool: + """ Re-implementation of builtins.any in pure Python, so it will execute purely in Python's VM. + + XXX: @_wraps will replace this docstring's with the original docstring + """ + for i in iterable: + if i: + return True + return False + + +@_wraps(builtins.enumerate) # type: ignore[arg-type] +def enumerate(iterable: Iterable[T], start: int = 0) -> Iterator[tuple[int, T]]: + """ Re-implementation of builtins.enumerate in pure Python, so it will execute purely in Python's VM. 
+ + XXX: @_wraps will replace this docstring's with the original docstring + """ + k = start + for i in iterable: + yield (k, i) + k += 1 + + +@_wraps(builtins.filter) # type: ignore[arg-type] +def filter(function: None | Callable[[T], object], iterable: Iterable[T]) -> Iterator[T]: + """ Re-implementation of builtins.filter in pure Python, so it will execute purely in Python's VM. + + XXX: @_wraps will replace this docstring's with the original docstring + """ + fun = (lambda i: i is not None) if function is None else function + for i in iterable: + if fun(i): + yield i + + +# list of allowed builtins during execution of an on-chain blueprint code +EXEC_BUILTINS: dict[str, Any] = { + # XXX: check https://github.com/python/mypy/blob/master/mypy/typeshed/stdlib/builtins.pyi for the full typing + # XXX: check https://github.com/python/cpython/blob/main/Python/bltinmodule.c for the implementation + + # XXX: basic constants, these aren't strictly needed since they are considered literals by the language + 'False': False, + 'None': None, + 'True': True, + + # special constant to indicate a method is not implemented + # see: https://docs.python.org/3/library/constants.html#NotImplemented + 'NotImplemented': builtins.NotImplemented, + + # This is the same as writting `...` so there's no point in preventing it + 'Ellipsis': builtins.Ellipsis, + + # XXX: required to declare classes + # O(1) + # (func: Callable[[], CellType | Any], name: str, /, *bases: Any, metaclass: Any = ..., **kwds: Any) -> Any + '__build_class__': builtins.__build_class__, + + # XXX: required to do imports + # XXX: will trigger the execution of the imported module + # (name: str, globals: Mapping[str, object] | None = None, locals: Mapping[str, object] | None = None, + # fromlist: Sequence[str] = (), level: int = 0) -> types.ModuleType + '__import__': _generate_restricted_import_function(ALLOWED_IMPORTS), + + # XXX: also required to declare classes + # XXX: this would be '__main__' for a module that is 
loaded as the main entrypoint, and the module name otherwise, + # since the blueprint code is adhoc, we could as well expose something else, like '__blueprint__' + # constant + '__name__': BLUEPRINT_CLASS_NAME, + + # make it always True, which is how we'll normally run anyway + '__debug__': True, + + # empty docs + '__doc__': '', + + # this means the module that loaded the code, it can be None so we just set it to None + '__loader__': None, + + # this can be None + '__package__': None, + + # this can also be None + '__spec__': None, + + # O(1) + # (x: SupportsAbs[T], /) -> T + 'abs': builtins.abs, + + # XXX: consumes an iterable when calling + # O(N) for N=len(iterable) + # (iterable: Iterable[object], /) -> bool + 'all': custom_all, + + # XXX: consumes an iterable when calling + # O(N) for N=len(iterable) + # (iterable: Iterable[object], /) -> bool + 'any': custom_any, + + # O(1) + # (number: int | SupportsIndex, /) -> str + 'bin': builtins.bin, + + # O(1) + # type bool(int) + 'bool': builtins.bool, + + # XXX: consumes an iterable when calling + # O(N) for N=len(iterable) + # type bytearray(MutableSequence[int]) + 'bytearray': builtins.bytearray, + + # XXX: consumes an iterable when calling + # O(N) for N=len(iterable) + # type bytes(Sequence[int]) + 'bytes': builtins.bytes, + + # O(1) + # (obj: object, /) -> bool + 'callable': builtins.callable, + + # O(1) + # (i: int, /) -> str + 'chr': builtins.chr, + + # O(1) + # decorator + 'classmethod': builtins.classmethod, + + # O(1) + # type complex + 'complex': builtins.complex, + + # XXX: consumes an iterator when calling + # O(N) for N=len(iterable) + # type dict(MutableMapping[K, V]) + # () -> dict + # (**kwargs: V) -> dict[str, V] + # (map: SupportsKeysAndGetItem[K, V], /) -> dict[K, V] + # (map: SupportsKeysAndGetItem[str, V], /, **kwargs: V) -> dict[K, V] + # (iterable: Iterable[tuple[K, V]], /) -> dict[K, V] + # (iterable: Iterable[tuple[str, V]], /, **kwargs: V) -> dict[str, V] + # (iterable: 
Iterable[list[str]], /) -> dict[str, str]
+    # (iterable: Iterable[list[bytes]], /) -> dict[bytes, bytes]
+    'dict': builtins.dict,
+
+    # O(1)
+    # (x: SupportsDivMod[T, R], y: T, /) -> R
+    # (x: T, y: SupportsRDivMod[T, R], /) -> R
+    'divmod': builtins.divmod,
+
+    # O(1)
+    # (iterable: Iterable[T], start: int = 0) -> enumerate(Iterator[T])
+    'enumerate': enumerate,
+
+    # O(1)
+    # (function: None, iterable: Iterable[T | None], /) -> filter(Iterator[T])
+    # (function: Callable[[S], TypeGuard[T]], iterable: Iterable[S], /) -> filter(Iterator[T])
+    # (function: Callable[[S], TypeIs[T]], iterable: Iterable[S], /) -> filter(Iterator[T])
+    # (function: Callable[[T], Any], iterable: Iterable[T], /) -> filter(Iterator[T])
+    'filter': filter,
+
+    # O(1)
+    # type float
+    'float': builtins.float,
+
+    # O(N) for N=len(value)
+    # (value: object, format_spec: str = "", /) -> str
+    'format': builtins.format,
+
+    # XXX: consumes an iterator when calling
+    # O(N) for N=len(iterable)
+    # type frozenset(AbstractSet[T])
+    # () -> frozenset
+    # (iterable: Iterable[T], /) -> frozenset[T]
+    'frozenset': builtins.frozenset,
+
+    # O(1)
+    # __hash__ shortcut
+    # (obj: object, /) -> int
+    'hash': builtins.hash,
+
+    # O(1)
+    # (number: int | SupportsIndex, /) -> str
+    'hex': builtins.hex,
+
+    # We allow `isinstance()` checks
+    'isinstance': builtins.isinstance,
+
+    # O(1) various -> int
+    # (x: ConvertibleToInt = ..., /) -> int
+    # (x: str | bytes | bytearray, /, base: SupportsIndex) -> int
+    'int': builtins.int,
+
+    # O(1)
+    # __iter__ shortcut
+    # (object: SupportsIter[I], /) -> I
+    # (object: GetItemIterable[T], /) -> Iterator[T]
+    # (object: Callable[[], T | None], sentinel: None, /) -> Iterator[T]
+    # (object: Callable[[], T], sentinel: object, /) -> Iterator[T]
+    'iter': builtins.iter,
+
+    # O(1)
+    # (obj: Sized, /) -> int
+    'len': builtins.len,
+
+    # XXX: consumes an iterator when calling
+    # O(N) for N=len(iterable)
+    # () -> list
+    # (iterable: Iterable[T], /) -> list[T]
+    'list': builtins.list,
+
+    # O(1)
+    # type map
+    # (func: Callable[[T], S], iter: Iterable[T], /) -> map[S]
+    # (func: Callable[[T1, T2], S], iter1: Iterable[T1], iter2: Iterable[T2], /) -> map[S]
+    # ...
+    # (func: Callable[[T1, ..., TN], S], iter1: Iterable[T1], ..., iterN: Iterable[TN],/) -> map[S]
+    'map': builtins.map,
+
+    # XXX: consumes an iterator when calling
+    # O(N) for N=len(iterables)
+    # (arg1: T, arg2: T, /, *_args: T, key: None = None) -> T
+    # (arg1: T, arg2: T, /, *_args: T, key: Callable[[T], T]) -> T
+    # (iterable: Iterable[T], /, *, key: None = None) -> T
+    # (iterable: Iterable[T], /, *, key: Callable[[T], T]) -> T
+    # (iterable: Iterable[T], /, *, key: None = None, default: T) -> T
+    # (iterable: Iterable[T], /, *, key: Callable[[T], T], default: T) -> T
+    'max': builtins.max,
+
+    # XXX: consumes an iterator when calling
+    # O(N) for N=len(iterables)
+    # (arg1: T, arg2: T, /, *_args: T, key: None = None) -> T
+    # (arg1: T, arg2: T, /, *_args: T, key: Callable[[T], T]) -> T
+    # (iterable: Iterable[T], /, *, key: None = None) -> T
+    # (iterable: Iterable[T], /, *, key: Callable[[T], T]) -> T
+    # (iterable: Iterable[T], /, *, key: None = None, default: T) -> T
+    # (iterable: Iterable[T], /, *, key: Callable[[T], T], default: T) -> T
+    'min': builtins.min,
+
+    # O(1)
+    # __next__ shortcut
+    # (i: SupportsNext[T], /) -> T
+    # (i: SupportsNext[T], default: V, /) -> T | V
+    'next': builtins.next,
+
+    # O(1)
+    # (number: int | SupportsIndex, /) -> str
+    'oct': builtins.oct,
+
+    # O(1)
+    # (c: str | bytes | bytearray, /) -> int
+    'ord': builtins.ord,
+
+    # XXX: can be used to easily make large numbers
+    # O(1)
+    # (base: int, exp: int, mod: int) -> int
+    'pow': builtins.pow,
+
+    # XXX: generator that escapes the VM
+    # O(1)
+    # type range(Sequence[int])
+    # (stop: SupportsIndex, /) -> range
+    # (start: SupportsIndex, stop: SupportsIndex, step: SupportsIndex = ..., /) -> range
+    'range': custom_range,
+
+    # XXX: can consume an iterator when calling
+ # O(N) for N=len(sequence) + # type reversed(Iterator[T]) + # (sequence: Reversible[T], /) -> reversed[T] + # (sequence: SupportsLenAndGetItem[T], /) -> reversed[T] + 'reversed': builtins.reversed, + + # O(1) + # (number: SupportsRound1[T], ndigits: None = None) -> T + # (number: SupportsRound2[T], ndigits: SupportsIndex) -> T + 'round': builtins.round, + + # XXX: consumes an iterator when calling + # O(N) for N=len(iterable) + # type set(MutableSet[T]) + # () -> set + # (iterable: Iterable[T], /) -> set[T] + 'set': builtins.set, + + # O(1) + # type slice(Generic[A, B, C]) + # (stop: int | None, /) -> slice[int | MaybeNone, int | MaybeNone, int | MaybeNone] + 'slice': builtins.slice, + + # XXX: consumes an iterator when calling + # O(N*log(N)) for N=len(iterable) + # (iterable: Iterable[T], /, *, key: None = None, reverse: bool = False) -> list[T] + # (iterable: Iterable[T], /, *, key: Callable[[T], T], reverse: bool = False) -> list[T] + 'sorted': builtins.sorted, + + # O(1) + # type staticmethod(Generic[P, R]) + # (f: Callable[P, R], /) -> staticmethod[P, R] + 'staticmethod': builtins.staticmethod, + + # O(1) + # __str__ shortcut + # (object: object = ...) -> str + # (object: ReadableBuffer, encoding: str = ..., errors: str = ...) -> str + 'str': builtins.str, + + # XXX: consumes an iterator when calling + # O(N) for N=len(iterable) + # (iterable: Iterable[bool], /, start: int = 0) -> int + # (iterable: Iterable[T], /) -> T + # (iterable: Iterable[T], /, start: T) -> T + 'sum': builtins.sum, + + # XXX: consumes an iterator when calling + # O(N) for N=len(iterable) + # type tuple(Sequence[T]) + # (iterable: Iterable[T] = ..., /) -> tuple[T] + 'tuple': builtins.tuple, + + # O(1) + # type zip(Iterator[T]) + # (iter: Iterable[T], /, *, strict: bool = ...) -> zip[tuple[T]] + # (iter1: Iterable[T1], iter2: Iterable[T2], /, *, strict: bool = ...) -> zip[tuple[T1, T2]] + # ... + # (iter1: Iterable[T1], ..., iterN: Iterable[TN], /, *, strict: bool = ...) 
-> zip[tuple[T1, ..., TN]] + 'zip': builtins.zip, + + # these exceptions aren't available in Python 3.10, so don't expose them + # 'BaseExceptionGroup': builtins.BaseExceptionGroup, + # 'ExceptionGroup': builtins.ExceptionGroup, + + # expose all other exception types: + 'ArithmeticError': builtins.ArithmeticError, + 'AssertionError': builtins.AssertionError, + 'AttributeError': builtins.AttributeError, + 'BaseException': builtins.BaseException, + 'BlockingIOError': builtins.BlockingIOError, + 'BrokenPipeError': builtins.BrokenPipeError, + 'BufferError': builtins.BufferError, + 'ChildProcessError': builtins.ChildProcessError, + 'ConnectionAbortedError': builtins.ConnectionAbortedError, + 'ConnectionError': builtins.ConnectionError, + 'ConnectionRefusedError': builtins.ConnectionRefusedError, + 'ConnectionResetError': builtins.ConnectionResetError, + 'EOFError': builtins.EOFError, + 'EnvironmentError': builtins.EnvironmentError, + 'Exception': builtins.Exception, + 'FileExistsError': builtins.FileExistsError, + 'FileNotFoundError': builtins.FileNotFoundError, + 'FloatingPointError': builtins.FloatingPointError, + 'GeneratorExit': builtins.GeneratorExit, + 'IOError': builtins.IOError, + 'ImportError': builtins.ImportError, + 'IndentationError': builtins.IndentationError, + 'IndexError': builtins.IndexError, + 'InterruptedError': builtins.InterruptedError, + 'IsADirectoryError': builtins.IsADirectoryError, + 'KeyError': builtins.KeyError, + 'KeyboardInterrupt': builtins.KeyboardInterrupt, + 'LookupError': builtins.LookupError, + 'MemoryError': builtins.MemoryError, + 'ModuleNotFoundError': builtins.ModuleNotFoundError, + 'NameError': builtins.NameError, + 'NotADirectoryError': builtins.NotADirectoryError, + 'NotImplementedError': builtins.NotImplementedError, + 'OSError': builtins.OSError, + 'OverflowError': builtins.OverflowError, + 'PermissionError': builtins.PermissionError, + 'ProcessLookupError': builtins.ProcessLookupError, + 'RecursionError': 
builtins.RecursionError, + 'ReferenceError': builtins.ReferenceError, + 'RuntimeError': builtins.RuntimeError, + 'StopAsyncIteration': builtins.StopAsyncIteration, + 'StopIteration': builtins.StopIteration, + 'SyntaxError': builtins.SyntaxError, + 'SystemError': builtins.SystemError, + 'SystemExit': builtins.SystemExit, + 'TabError': builtins.TabError, + 'TimeoutError': builtins.TimeoutError, + 'TypeError': builtins.TypeError, + 'UnboundLocalError': builtins.UnboundLocalError, + 'UnicodeDecodeError': builtins.UnicodeDecodeError, + 'UnicodeEncodeError': builtins.UnicodeEncodeError, + 'UnicodeError': builtins.UnicodeError, + 'UnicodeTranslateError': builtins.UnicodeTranslateError, + 'ValueError': builtins.ValueError, + 'ZeroDivisionError': builtins.ZeroDivisionError, + + # expose all warning types: + 'BytesWarning': builtins.BytesWarning, + 'DeprecationWarning': builtins.DeprecationWarning, + 'EncodingWarning': builtins.EncodingWarning, + 'FutureWarning': builtins.FutureWarning, + 'ImportWarning': builtins.ImportWarning, + 'PendingDeprecationWarning': builtins.PendingDeprecationWarning, + 'ResourceWarning': builtins.ResourceWarning, + 'RuntimeWarning': builtins.RuntimeWarning, + 'SyntaxWarning': builtins.SyntaxWarning, + 'UnicodeWarning': builtins.UnicodeWarning, + 'UserWarning': builtins.UserWarning, + 'Warning': builtins.Warning, + + # These other builtins are NOT exposed: + # ===================================== + + # XXX: async is disabled + 'aiter': _generate_disabled_builtin_func('aiter'), + + # XXX: async is disabled + 'anext': _generate_disabled_builtin_func('anext'), + + # XXX: used to call sys.breakpointhook, must not be allowed, or we expose a function that raises an exception + 'breakpoint': _generate_disabled_builtin_func('breakpoint'), + + # XXX: used to compile dynamic code, must not be allowed + 'compile': _generate_disabled_builtin_func('compile'), + + # XXX: might be harmless, but it's a _Printer and printing is disabled + 'copyright': 
_generate_disabled_builtin_func('copyright'), + + # XXX: might be harmless, but it's a _Printer and printing is disabled + 'credits': _generate_disabled_builtin_func('credits'), + + # XXX: used to alter attributes dynamically, must not be allowed + 'delattr': _generate_disabled_builtin_func('delattr'), + + # XXX: used to list attributes dynamically, must not be allowed + 'dir': _generate_disabled_builtin_func('dir'), + + # XXX: used to run dynamic code, must not be allowed + 'eval': _generate_disabled_builtin_func('eval'), + + # XXX: used to run dynamic code, must not be allowed + 'exec': _generate_disabled_builtin_func('exec'), + + # XXX: used to raise SystemExit exception to close the process, we could make it raise a NCFail + 'exit': _generate_disabled_builtin_func('exit'), + + # XXX: used to dynamically get an attribute, must not be allowed + 'getattr': _generate_disabled_builtin_func('getattr'), + + # XXX: used to dynamically list variables in the global scope, we already restrict those, so it might be fine + 'globals': _generate_disabled_builtin_func('globals'), + + # XXX: used to dynamically check if an attribute exists, must not be allowed + 'hasattr': _generate_disabled_builtin_func('hasattr'), + + # XXX: interactive helper, but interactivity is not allowed + 'help': _generate_disabled_builtin_func('help'), + + # XXX: used to get the address of an object, which allows a blueprint to not be a pure function + 'id': _generate_disabled_builtin_func('id'), + + # XXX: interactive input, but interactivity is not allowed + 'input': _generate_disabled_builtin_func('input'), + + # XXX: could be used to introspect on the objects we provide, disallow it just in case + 'issubclass': _generate_disabled_builtin_func('issubclass'), + + # XXX: might be harmless, but it's a _Printer and printing is disabled + 'license': _generate_disabled_builtin_func('license'), + + # XXX: used to dynamically access all local variables, could be fine, but restrict it just in case + 
'locals': _generate_disabled_builtin_func('locals'), + + # XXX: used for the low level buffer protocol, disallow it just in case + 'memoryview': _generate_disabled_builtin_func('memoryview'), + + # XXX: used to open files, which is not allowed, maybe expose a dummy function that always fails + 'open': _generate_disabled_builtin_func('open'), + + # XXX: used for printing, which is not allowed, we could expose a function that does logging to help with debugging + 'print': _generate_disabled_builtin_func('print'), + + # XXX: same as exit function + 'quit': _generate_disabled_builtin_func('quit'), + + # XXX: used to dynamically set attributes, must not be allowed + 'setattr': _generate_disabled_builtin_func('setattr'), + + # XXX: can be used to inspect an object's attributes, including "private" ones + 'vars': _generate_disabled_builtin_func('vars'), + + # XXX: disallow just in case + # O(1) + # __repr__ shortcut + # (obj: object, /) -> str + 'ascii': _generate_disabled_builtin_func('ascii'), + + # XXX: disallow just in case + # O(1) + # __repr__ shortcut + # (obj: object, /) -> str + 'repr': _generate_disabled_builtin_func('repr'), + + # XXX: can be used to hide explicit function calls, not sure if this is a problem + # O(1) + # type property + # ( + # fget: Callable[[Any], Any] | None = ..., + # fset: Callable[[Any, Any], None] | None = ..., + # fdel: Callable[[Any], None] | None = ..., + # doc: str | None = ..., + # ) -> property + 'property': _generate_disabled_builtin_func('property'), + + # XXX: Can be used to get an object's class and its metaclass + # O(1) + # type type + # (o: object, /) -> type + # (name: str, bases: tuple[type, ...], namespace: dict[str, Any], /, **kwds: Any) -> T(type) + 'type': _generate_disabled_builtin_func('type'), + + # XXX: Root object which contains dangerous methods such as `__setattr__` + # O(1) + # type object + # () -> object + 'object': _generate_disabled_builtin_func('object'), + + # XXX: Can be used to get the root `object` + 
# O(1) + # type super + # (t: Any, obj: Any, /) -> super + # (t: Any, /) -> super + # () -> super + 'super': _generate_disabled_builtin_func('super'), +} diff --git a/hathor/nanocontracts/exception.py b/hathor/nanocontracts/exception.py new file mode 100644 index 000000000..ac23763a4 --- /dev/null +++ b/hathor/nanocontracts/exception.py @@ -0,0 +1,200 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from hathor.exception import HathorError +from hathor.transaction.exceptions import TxValidationError + + +class BlueprintSyntaxError(SyntaxError): + """Raised when a blueprint contains invalid syntax.""" + pass + + +class NCError(HathorError): + """Base exception for nano contract's exceptions.""" + pass + + +class NCTxValidationError(TxValidationError): + pass + + +class NCInvalidSignature(NCTxValidationError): + pass + + +class NCInvalidPubKey(NCTxValidationError): + pass + + +class NCFail(NCError): + """Raised by Blueprint's methods to fail execution.""" + + +class NanoContractDoesNotExist(NCFail): + pass + + +class BlueprintDoesNotExist(NCFail): + pass + + +class NCSerializationError(NCFail): + pass + + +class NCSerializationArgTooLong(NCSerializationError): + pass + + +class NCSerializationTypeError(NCSerializationError): + pass + + +class NCViewMethodError(NCFail): + """Raised when a view method changes the state of the contract.""" + pass + + +class NCMethodNotFound(NCFail): + """Raised when a method is not found in a nano 
contract.""" + pass + + +class NCInsufficientFunds(NCFail): + """Raised when there are not enough funds to withdraw from a nano contract.""" + pass + + +class NCAttributeError(NCFail): + pass + + +class NCInvalidContext(NCFail): + """Raised when trying to run a method with an invalid context.""" + pass + + +class NCRecursionError(NCFail): + """Raised when recursion gets too deep.""" + + +class NCNumberOfCallsExceeded(NCFail): + """Raised when the total number of calls has been exceeded.""" + + +class NCInvalidContractId(NCFail): + """Raised when a contract call is invalid.""" + + +class NCInvalidMethodCall(NCFail): + """Raised when a contract calls another contract's invalid method.""" + + +class NCInvalidInitializeMethodCall(NCFail): + """Raised when a contract calls another contract's initialize method.""" + + +class NCInvalidPublicMethodCallFromView(NCFail): + """Raised when a contract's public method is called from a view method.""" + + +class NCAlreadyInitializedContractError(NCFail): + """Raised when one tries to initialize a contract that has already been initialized.""" + + +class NCUninitializedContractError(NCFail): + """Raised when a contract calls a method from an uninitialized contract.""" + + +class NCInvalidAction(NCFail): + """Raised when an action is invalid.""" + pass + + +class NCInvalidSyscall(NCFail): + """Raised when a syscall is invalid.""" + pass + + +class NCTokenAlreadyExists(NCFail): + """Raised when one tries to create a duplicated token.""" + + +class NCForbiddenAction(NCFail): + """Raised when an action is forbidden on a method.""" + pass + + +class UnknownFieldType(NCError): + """Raised when there is no field available for a given type.""" + pass + + +class NCContractCreationNotFound(NCError): + """Raised when a nano contract creation transaction is not found.
+ + This error might also happen when the transaction is at the mempool or when it fails execution.""" + pass + + +class NCContractCreationAtMempool(NCContractCreationNotFound): + """Raised when a nano contract creation transaction is at the mempool, so it has not been + executed yet.""" + pass + + +class NCContractCreationVoided(NCContractCreationNotFound): + """Raised when a nano contract creation transaction is voided. + + The two most common reasons for a voided transaction are that it was voided by + another transaction (e.g., double spending) or that it failed execution.""" + pass + + +class OCBInvalidScript(NCError): + """Raised when an On-Chain Blueprint script does not pass our script restrictions check. + """ + pass + + +class OCBInvalidBlueprintVertexType(NCError): + """Raised when a vertex that is not an OnChainBlueprint is used as a blueprint-id. + """ + pass + + +class OCBBlueprintNotConfirmed(NCError): + """Raised when trying to use an OnChainBlueprint that is not confirmed by a block in the current best chain. + """ + + +class OCBPubKeyNotAllowed(NCError): + """Raised when an OnChainBlueprint transaction uses a pubkey that is not explicitly allowed in the settings. + """ + + +class OCBOutOfFuelDuringLoading(NCError): + """Raised when loading an On-chain Blueprint and the execution exceeds the fuel limit. + """ + + +class OCBOutOfMemoryDuringLoading(NCError): + """Raised when loading an On-chain Blueprint and the execution exceeds the memory limit. + """ + + +class NCDisabledBuiltinError(NCError): + """Raised when a disabled builtin is used during creation or execution of a nanocontract.
+ """ diff --git a/hathor/nanocontracts/fields/__init__.py b/hathor/nanocontracts/fields/__init__.py new file mode 100644 index 000000000..794dc269d --- /dev/null +++ b/hathor/nanocontracts/fields/__init__.py @@ -0,0 +1,61 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from collections import deque +from typing import TypeVar + +from hathor.nanocontracts.fields.deque_field import DequeField +from hathor.nanocontracts.fields.dict_field import DictField +from hathor.nanocontracts.fields.field import Field +from hathor.nanocontracts.fields.set_field import SetField +from hathor.nanocontracts.fields.utils import TypeToFieldMap +from hathor.nanocontracts.nc_types import DEFAULT_TYPE_ALIAS_MAP, FIELD_TYPE_TO_NC_TYPE_MAP +from hathor.nanocontracts.nc_types.utils import TypeAliasMap, TypeToNCTypeMap + +__all__ = [ + 'TYPE_TO_FIELD_MAP', + 'DequeField', + 'DictField', + 'Field', + 'SetField', + 'TypeToFieldMap', + 'make_field_for_type', +] + +T = TypeVar('T') + +TYPE_TO_FIELD_MAP: TypeToFieldMap = { + dict: DictField, + list: DequeField, # XXX: we should really make a ListField, a deque is different from a list + set: SetField, + deque: DequeField, + # XXX: other types fallback to FIELD_TYPE_TO_NC_TYPE_MAP +} + + +def make_field_for_type( + name: str, + type_: type[T], + /, + *, + type_field_map: TypeToFieldMap = TYPE_TO_FIELD_MAP, + type_nc_type_map: TypeToNCTypeMap = FIELD_TYPE_TO_NC_TYPE_MAP, + type_alias_map: TypeAliasMap = 
DEFAULT_TYPE_ALIAS_MAP, +) -> Field[T]: + """ Like Field.from_name_and_type, but with default maps. + + Default arguments can't be easily added to NCType.from_type signature because of recursion. + """ + type_map = Field.TypeMap(type_alias_map, type_nc_type_map, type_field_map) + return Field.from_name_and_type(name, type_, type_map=type_map) diff --git a/hathor/nanocontracts/fields/container_field.py b/hathor/nanocontracts/fields/container_field.py new file mode 100644 index 000000000..4558562a0 --- /dev/null +++ b/hathor/nanocontracts/fields/container_field.py @@ -0,0 +1,131 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +from abc import ABC, abstractmethod +from collections.abc import Container +from typing import Generic, TypeVar + +from typing_extensions import TYPE_CHECKING, Self, override + +from hathor.nanocontracts.fields.field import Field +from hathor.nanocontracts.storage import NCContractStorage +from hathor.util import not_none +from hathor.utils.typing import InnerTypeMixin, get_origin + +if TYPE_CHECKING: + from hathor.nanocontracts.blueprint import Blueprint + +C = TypeVar('C', bound=Container) + +KEY_SEPARATOR: str = ':' + + +class StorageContainer(Generic[C], ABC): + """ Abstraction over the class that will be returned when accessing a container field. 
+ + Every method and property in this class should use either `__dunder` or `__special__` naming pattern, because + otherwise the property/method would be accessible from an OCB. Even if there would be no harm, this is generally + avoided. + """ + __slots__ = () + + @classmethod + @abstractmethod + def __check_name_and_type__(cls, name: str, type_: type[C]) -> None: + """Should raise a TypeError if the given name or type is incompatible for use with container.""" + raise NotImplementedError + + @classmethod + @abstractmethod + def __from_name_and_type__( + cls, + storage: NCContractStorage, + name: str, + type_: type[C], + /, + *, + type_map: Field.TypeMap, + ) -> Self: + """Every StorageContainer should be able to be built with this signature. + + Expect a type that has been previously checked with `cls.__check_name_and_type__`. + """ + raise NotImplementedError + + +T = TypeVar('T', bound=StorageContainer) + + +class ContainerField(InnerTypeMixin[T], Field[T]): + """ This class models a Field with a StorageContainer, it can't be set, only accessed as a container. + + This is modeled after a Python descriptor, similar to the built in `property`, see: + + - https://docs.python.org/3/reference/datamodel.html#implementing-descriptors + + The observed value behaves like a container, the specific behavior depends on the container type. 
+ """ + + __slots__ = ('__name', '__type', '__type_map') + __name: str + __type: type[T] + __type_map: Field.TypeMap + + # XXX: customize InnerTypeMixin behavior so it stores the origin type, since that's what we want + @classmethod + def __extract_inner_type__(cls, args: tuple[type, ...], /) -> type[T]: + inner_type: type[T] = InnerTypeMixin.__extract_inner_type__(args) + return not_none(get_origin(inner_type)) + + @override + @classmethod + def _from_name_and_type(cls, name: str, type_: type[T], /, *, type_map: Field.TypeMap) -> Self: + if not issubclass(cls.__inner_type__, StorageContainer): + raise TypeError(f'{cls.__inner_type__} is not a StorageContainer') + cls.__inner_type__.__check_name_and_type__(name, type_) + field = cls() + field.__name = name + field.__type = type_ + field.__type_map = type_map + return field + + @override + def __set__(self, instance: Blueprint, value: T) -> None: + # XXX: alternatively this could mimick a `my_container.clear(); my_container.update(value)` + raise AttributeError('cannot set a container field') + + @override + def __get__(self, instance: Blueprint, owner: object | None = None) -> T: + cache = instance.syscall.__cache__ + if cache is not None and (obj := cache.get(self.__name)): + return obj + + # XXX: ideally we would instantiate the storage within _from_name_and_type, but we need the blueprint instance + # and we only have access to it when __get__ is called the first time + storage = self.__inner_type__.__from_name_and_type__( + instance.syscall.__storage__, + self.__name, + self.__type, + type_map=self.__type_map, + ) + if cache is not None: + cache[self.__name] = storage + return storage + + @override + def __delete__(self, instance: Blueprint) -> None: + # XXX: alternatively delete the database + raise AttributeError('cannot delete a container field') diff --git a/hathor/nanocontracts/fields/deque_field.py b/hathor/nanocontracts/fields/deque_field.py new file mode 100644 index 000000000..2f2b449e6 --- /dev/null 
+++ b/hathor/nanocontracts/fields/deque_field.py @@ -0,0 +1,292 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from collections import deque +from collections.abc import Iterable, Iterator, Sequence +from dataclasses import dataclass, replace +from typing import ClassVar, SupportsIndex, TypeVar, get_args, get_origin + +from typing_extensions import Self, override + +from hathor.nanocontracts.fields.container_field import KEY_SEPARATOR, ContainerField, StorageContainer +from hathor.nanocontracts.fields.field import Field +from hathor.nanocontracts.nc_types import NCType, VarInt32NCType +from hathor.nanocontracts.nc_types.dataclass_nc_type import make_dataclass_opt_nc_type +from hathor.nanocontracts.storage import NCContractStorage +from hathor.util import not_none + +T = TypeVar('T') +_METADATA_KEY: str = '__metadata__' +_INDEX_NC_TYPE = VarInt32NCType() + +# TODO: support maxlen (will require support for initialization values) + + +@dataclass(slots=True, frozen=True, kw_only=True) +class _DequeMetadata: + first_index: int = 0 + length: int = 0 + reversed: bool = False + + @property + def last_index(self) -> int: + return self.first_index + self.length - 1 + + +_METADATA_NC_TYPE = make_dataclass_opt_nc_type(_DequeMetadata) + + +class DequeStorageContainer(StorageContainer[Sequence[T]]): + # from https://github.com/python/typeshed/blob/main/stdlib/collections/__init__.pyi + __slots__ = ('__storage', '__name', '__value', 
'__metadata_key') + __storage: NCContractStorage + __name: str + __value: NCType[T] + __metadata_key: bytes + + def __init__(self, storage: NCContractStorage, name: str, value: NCType[T]) -> None: + self.__storage = storage + self.__name = name + self.__value = value + self.__metadata_key = f'{name}{KEY_SEPARATOR}{_METADATA_KEY}'.encode() + + # Methods needed by StorageContainer: + + @override + @classmethod + def __check_name_and_type__(cls, name: str, type_: type[Sequence[T]]) -> None: + if not name.isidentifier(): + raise TypeError('field name must be a valid identifier') + origin_type: type[Sequence[T]] = not_none(get_origin(type_)) + if not issubclass(origin_type, Sequence): + raise TypeError('expected Sequence type') + args = get_args(type_) + if not args or len(args) != 1: + raise TypeError(f'expected {type_.__name__}[]') + + @override + @classmethod + def __from_name_and_type__( + cls, + storage: NCContractStorage, + name: str, + type_: type[Sequence[T]], + /, + *, + type_map: Field.TypeMap, + ) -> 'Self': + item_type, = get_args(type_) + item_nc_type = NCType.from_type(item_type, type_map=type_map.to_nc_type_map()) + return cls(storage, name, item_nc_type) + + # INTERNAL METHODS: all of these must be __dunder_methods so they aren't accessible from an OCB + + def __to_db_key(self, index: SupportsIndex) -> bytes: + return f'{self.__name}{KEY_SEPARATOR}'.encode() + _INDEX_NC_TYPE.to_bytes(index.__index__()) + + def __get_metadata(self) -> _DequeMetadata: + metadata = self.__storage.get_obj(self.__metadata_key, _METADATA_NC_TYPE, default=None) + + if metadata is None: + metadata = _DequeMetadata() + self.__storage.put_obj(self.__metadata_key, _METADATA_NC_TYPE, metadata) + + assert isinstance(metadata, _DequeMetadata) + return metadata + + def __update_metadata(self, new_metadata: _DequeMetadata) -> None: + assert new_metadata.length >= 0 + if new_metadata.length == 0: + return self.__storage.del_obj(self.__metadata_key) + 
self.__storage.put_obj(self.__metadata_key, _METADATA_NC_TYPE, new_metadata) + + def __extend(self, *, items: Iterable[T], metadata: _DequeMetadata) -> None: + new_last_index = metadata.last_index + for item in items: + new_last_index += 1 + key = self.__to_db_key(new_last_index) + self.__storage.put_obj(key, self.__value, item) + new_metadata = replace(metadata, length=new_last_index - metadata.first_index + 1) + self.__update_metadata(new_metadata) + + def __extendleft(self, *, items: Iterable[T], metadata: _DequeMetadata) -> None: + new_first_index = metadata.first_index + for item in items: + new_first_index -= 1 + key = self.__to_db_key(new_first_index) + self.__storage.put_obj(key, self.__value, item) + new_metadata = replace( + metadata, + first_index=new_first_index, + length=metadata.last_index - new_first_index + 1, + ) + self.__update_metadata(new_metadata) + + def __pop(self, *, metadata: _DequeMetadata, left: bool) -> T: + if metadata.length == 0: + raise IndexError + + index = metadata.first_index if left else metadata.last_index + key = self.__to_db_key(index) + item = self.__storage.get_obj(key, self.__value) + self.__storage.del_obj(key) + new_metadata = replace( + metadata, + first_index=metadata.first_index + 1 if left else metadata.first_index, + length=metadata.length - 1 + ) + self.__update_metadata(new_metadata) + return item + + def __to_internal_index(self, *, index: SupportsIndex) -> int: + metadata = self.__get_metadata() + idx = index.__index__() + + if idx < 0: + idx += metadata.length + + if idx < 0 or idx >= metadata.length: + raise IndexError + + return metadata.last_index - idx if metadata.reversed else metadata.first_index + idx + + # Methods needed by MutableSequence and Sequence: + + def __getitem__(self, index: SupportsIndex, /) -> T: + internal_index = self.__to_internal_index(index=index) + key = self.__to_db_key(internal_index) + return self.__storage.get_obj(key, self.__value) + + def __len__(self) -> int: + return 
self.__get_metadata().length + + def __setitem__(self, index: SupportsIndex, value: T, /) -> None: + internal_index = self.__to_internal_index(index=index) + key = self.__to_db_key(internal_index) + self.__storage.put_obj(key, self.__value, value) + + def __delitem__(self, key: SupportsIndex, /) -> None: + raise NotImplementedError + + def insert(self, i: int, x: T, /) -> None: + raise NotImplementedError + + # Methods provided by Sequence (currently not implemented): + + # def index(self, x: T, start: int = 0, stop: int = ..., /) -> int: ... + # def count(self, x: T, /) -> int: ... + # def __contains__(self, key: object, /) -> bool: ... + # def __iter__(self) -> Iterator[_T_co]: ... + # def __reversed__(self) -> None: + + # Methods provided by MutableSequence (currently not implemented): + + # def append(self, x: T, /) -> None: ... + # def clear(self) -> None: ... + # def extend(self, iterable: Iterable[T], /) -> None: ... + # def reverse(self) -> None: + # def pop(self) -> T: ... # type: ignore[override] + # def remove(self, value: T, /) -> None: ... + # def __iadd__(self, value: Iterable[T], /) -> Self: ... 
+ + # out of those, we specialize these: + + def append(self, item: T, /) -> None: + self.extend((item,)) + + def extend(self, items: Iterable[T], /) -> None: + metadata = self.__get_metadata() + if metadata.reversed: + return self.__extendleft(items=items, metadata=metadata) + self.__extend(items=items, metadata=metadata) + + def pop(self) -> T: + metadata = self.__get_metadata() + return self.__pop(metadata=metadata, left=metadata.reversed) + + def reverse(self) -> None: + metadata = self.__get_metadata() + new_metadata = replace(metadata, reversed=not metadata.reversed) + self.__update_metadata(new_metadata) + + def __iter__(self) -> Iterator[T]: + metadata = self.__get_metadata() + indexes = range(metadata.first_index, metadata.last_index + 1) + + if metadata.reversed: + indexes = range(metadata.last_index, metadata.first_index - 1, -1) + + for i in indexes: + key = self.__to_db_key(i) + yield self.__storage.get_obj(key, self.__value) + + # Other deque methods that we implement to look like a deque: + + @property + def maxlen(self) -> int | None: + return None + + def appendleft(self, item: T, /) -> None: + self.extendleft((item,)) + + def extendleft(self, items: Iterable[T], /) -> None: + metadata = self.__get_metadata() + if metadata.reversed: + return self.__extend(items=items, metadata=metadata) + self.__extendleft(items=items, metadata=metadata) + + def popleft(self) -> T: + metadata = self.__get_metadata() + return self.__pop(metadata=metadata, left=not metadata.reversed) + + def copy(self) -> 'Self': + raise NotImplementedError + + def rotate(self, n: int = 1, /) -> None: + raise NotImplementedError + + def __copy__(self) -> 'Self': + raise NotImplementedError + + __hash__: ClassVar[None] # type: ignore[assignment] + + def __reduce__(self) -> tuple[type['Self'], tuple[()], None, Iterator[T]]: + raise NotImplementedError + + def __add__(self, value: 'Self', /) -> 'Self': + raise NotImplementedError + + def __mul__(self, value: int, /) -> 'Self': + raise 
NotImplementedError + + def __imul__(self, value: int, /) -> 'Self': + raise NotImplementedError + + def __lt__(self, value: deque[T], /) -> bool: + raise NotImplementedError + + def __le__(self, value: deque[T], /) -> bool: + raise NotImplementedError + + def __gt__(self, value: deque[T], /) -> bool: + raise NotImplementedError + + def __ge__(self, value: deque[T], /) -> bool: + raise NotImplementedError + + def __eq__(self, value: object, /) -> bool: + raise NotImplementedError + + +DequeField = ContainerField[DequeStorageContainer[T]] diff --git a/hathor/nanocontracts/fields/dict_field.py b/hathor/nanocontracts/fields/dict_field.py new file mode 100644 index 000000000..43b76925f --- /dev/null +++ b/hathor/nanocontracts/fields/dict_field.py @@ -0,0 +1,192 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
from collections.abc import Hashable, Iterator, Mapping
from typing import TypeVar, get_args, get_origin, overload

from typing_extensions import Self, override

from hathor.nanocontracts.fields.container_field import KEY_SEPARATOR, ContainerField, StorageContainer
from hathor.nanocontracts.fields.field import Field
from hathor.nanocontracts.nc_types import NCType, VarUint32NCType
from hathor.nanocontracts.nc_types.utils import is_origin_hashable
from hathor.nanocontracts.storage import NCContractStorage
from hathor.util import not_none

K = TypeVar('K', bound=Hashable)
V = TypeVar('V')
_T = TypeVar('_T')

# db-key suffix under which the entry count is persisted, and the NCType used to (de)serialize it
_LENGTH_KEY: str = '__length__'
_LENGTH_NC_TYPE = VarUint32NCType()


class DictStorageContainer(StorageContainer[Mapping[K, V]]):
    """This is a dict-like object backed by a contract storage.

    Each entry lives under a db key derived from the field name plus the serialized key, and the
    number of entries is kept in a separate `__length__` db key so that `len()` is O(1).

    Based on the implementation of UserDict, see:
    - https://github.com/python/cpython/blob/main/Lib/collections/__init__.py
    """

    __slots__ = ('__storage', '__name', '__key', '__value', '__length_key')
    __storage: NCContractStorage  # backing contract storage
    __name: str                   # field name, used as the db-key prefix
    __key: NCType[K]              # (de)serializer for keys
    __value: NCType[V]            # (de)serializer for values
    __length_key: bytes           # db key where the entry count is stored

    def __init__(self, storage: NCContractStorage, name: str, key: NCType[K], value: NCType[V]) -> None:
        self.__storage = storage
        self.__name = name
        self.__key = key
        self.__value = value
        self.__length_key = f'{name}{KEY_SEPARATOR}{_LENGTH_KEY}'.encode()

    # Methods needed by StorageContainer:

    @override
    @classmethod
    def __check_name_and_type__(cls, name: str, type_: type[Mapping[K, V]]) -> None:
        """Validate that `name`/`type_` describe a usable dict field, raising TypeError otherwise."""
        if not name.isidentifier():
            raise TypeError('field name must be a valid identifier')
        origin_type: type[Mapping[K, V]] = not_none(get_origin(type_))
        if not issubclass(origin_type, Mapping):
            raise TypeError('expected Mapping type')
        args = get_args(type_)
        # a dict signature needs exactly a key type and a value type
        if len(args) != 2:
            raise TypeError(f'expected {type_.__name__}[<key-type>, <value-type>]')
        key_type, value_type = args
        if not is_origin_hashable(key_type):
            raise TypeError(f'{key_type} is not hashable')

    @override
    @classmethod
    def __from_name_and_type__(
        cls,
        storage: NCContractStorage,
        name: str,
        type_: type[Mapping[K, V]],
        /,
        *,
        type_map: Field.TypeMap,
    ) -> Self:
        """Build a container from the declared `Mapping[K, V]` annotation."""
        key_type, value_type = get_args(type_)
        key_nc_type = NCType.from_type(key_type, type_map=type_map.to_nc_type_map())
        assert key_nc_type.is_hashable(), 'hashable "types" must produce hashable "values"'
        value_nc_type = NCType.from_type(value_type, type_map=type_map.to_nc_type_map())
        return cls(storage, name, key_nc_type, value_nc_type)

    # INTERNAL METHODS: all of these must be __dunder_methods so they aren't accessible from an OCB

    def __to_db_key(self, key: K) -> bytes:
        # We don't need to explicitly hash the key here, because the trie already does it internally.
        return f'{self.__name}{KEY_SEPARATOR}'.encode() + self.__key.to_bytes(key)

    def __get_length(self) -> int:
        return self.__storage.get_obj(self.__length_key, _LENGTH_NC_TYPE, default=0)

    def __increase_length(self) -> None:
        self.__storage.put_obj(self.__length_key, _LENGTH_NC_TYPE, self.__get_length() + 1)

    def __decrease_length(self) -> None:
        length = self.__get_length()
        assert length > 0
        self.__storage.put_obj(self.__length_key, _LENGTH_NC_TYPE, length - 1)

    # Methods needed by MutableMapping (and to behave like a dict)

    def __len__(self) -> int:
        return self.__get_length()

    def __getitem__(self, key: K, /) -> V:
        # get the data from the storage; the storage is expected to raise KeyError for missing keys
        db_key = self.__to_db_key(key)
        return self.__storage.get_obj(db_key, self.__value)

    def __setitem__(self, key: K, value: V, /) -> None:
        # only bump the count for brand-new keys; overwriting keeps the length unchanged
        if key not in self:
            self.__increase_length()
        # store `value` at `key` in the storage
        self.__storage.put_obj(self.__to_db_key(key), self.__value, value)

    def __delitem__(self, key: K, /) -> None:
        # NOTE(review): unlike a builtin dict, deleting a missing key is a silent no-op instead of
        # raising KeyError — confirm this divergence is intended
        if key not in self:
            return
        self.__decrease_length()
        # delete the key from the storage
        self.__storage.del_obj(self.__to_db_key(key))

    def __iter__(self) -> Iterator[K]:
        raise NotImplementedError

    # Methods provided by MutableMapping (currently not implemented):

    # def pop(self, key, default=__marker):
    # def popitem(self):
    # def clear(self):
    # def update(self, other=(), /, **kwds):
    # def setdefault(self, key, default=None):

    # Modify __contains__ and get() to work like dict does when __missing__ is present.

    def __contains__(self, key: K, /) -> bool:
        # return true if the `key` exists in the collection (EAFP: probe the storage directly)
        try:
            self[key]
        except KeyError:
            return False
        else:
            return True

    @overload
    def get(self, key: K, /) -> V:
        ...

    @overload
    def get(self, key: K, default: V | _T | None, /) -> V | _T | None:
        ...

    # XXX: `misc` is ignored because mypy thinks this function does not accept all arguments of the second get overload
    def get(self, key: K, default: V | _T | None = None, /) -> V | _T | None:  # type: ignore[misc]
        # return the value for key if key is in the storage, else default
        try:
            return self[key]
        except KeyError:
            return default

    # Now, add the methods in dicts but not in MutableMapping

    # def __repr__(self):
    #     raise NotImplementedError

    def __or__(self, value, /):
        raise NotImplementedError

    def __ror__(self, value, /):
        raise NotImplementedError

    def __ior__(self, value, /):
        raise NotImplementedError

    def __copy__(self):
        raise NotImplementedError

    def copy(self):
        raise NotImplementedError

    @classmethod
    def fromkeys(cls, iterable, value=None, /):
        raise NotImplementedError


DictField = ContainerField[DictStorageContainer[K, V]]
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +from abc import ABC, abstractmethod +from typing import Generic, NamedTuple, TypeVar, final, get_origin + +from typing_extensions import TYPE_CHECKING, Self + +from hathor.nanocontracts.fields.utils import TypeToFieldMap +from hathor.nanocontracts.nc_types import NCType +from hathor.nanocontracts.nc_types.utils import TypeAliasMap, TypeToNCTypeMap + +if TYPE_CHECKING: + from hathor.nanocontracts.blueprint import Blueprint + +T = TypeVar('T') + + +class Field(Generic[T], ABC): + """ This class is used to model the fields of a Blueprint from the signature that defines them. + + Fields are generally free to implement how they behave, but we have 2 types of behavior: + + - `self.foo = 1` will serialize `1` and save to db on a key derived from `'foo'` name + - `self.foo['bar'] = 'baz'` will serialize and save to db on a key derive from `('foo', 'bar')` + + Usually only one of the two patterns above is supported by a field. The base class itself only defines how to + construct a Field instance from a name and type signature, which is what the Blueprint metaclass needs. + + + OCB safety considerations: + + - A Blueprint must not be able to access a Field instance directly + """ + + class TypeMap(NamedTuple): + alias_map: TypeAliasMap + nc_types_map: TypeToNCTypeMap + fields_map: TypeToFieldMap + + def to_nc_type_map(self) -> NCType.TypeMap: + return NCType.TypeMap(self.alias_map, self.nc_types_map) + + # XXX: do we need to define field.__objclass__ for anything? 
+ + @final + @staticmethod + def from_name_and_type(name: str, type_: type[T], /, *, type_map: TypeMap) -> Field[T]: + from hathor.nanocontracts.fields.nc_type_field import NCTypeField + + # if we have a `dict[int, int]` we use `get_origin()` to get the `dict` part, since it's a different instance + origin_type = get_origin(type_) or type_ + + if origin_type in type_map.fields_map: + field_class = type_map.fields_map[origin_type] + return field_class._from_name_and_type(name, type_, type_map=type_map) + else: + try: + return NCTypeField._from_name_and_type(name, type_, type_map=type_map) + except TypeError as e: + raise TypeError(f'type {type_} is not supported by any Field class') from e + + @classmethod + @abstractmethod + def _from_name_and_type(cls, name: str, type_: type[T], /, *, type_map: TypeMap) -> Self: + raise NotImplementedError + + @abstractmethod + def __set__(self, instance: Blueprint, value: T) -> None: + # called when doing `instance.field = value` + raise NotImplementedError + + @abstractmethod + def __get__(self, instance: Blueprint, owner: object | None = None) -> T: + # called when doing `instance.field` as an expression + raise NotImplementedError + + @abstractmethod + def __delete__(self, instance: Blueprint) -> None: + # called when doing `del instance.field` + raise NotImplementedError diff --git a/hathor/nanocontracts/fields/nc_type_field.py b/hathor/nanocontracts/fields/nc_type_field.py new file mode 100644 index 000000000..26c9027ca --- /dev/null +++ b/hathor/nanocontracts/fields/nc_type_field.py @@ -0,0 +1,68 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
from typing import TypeVar

from typing_extensions import Self

from hathor.nanocontracts.blueprint import Blueprint
from hathor.nanocontracts.fields.field import Field
from hathor.nanocontracts.nc_types import NCType

T = TypeVar('T')


class NCTypeField(Field[T]):
    """A Field backed directly by an NCType: reading deserializes from the db, writing serializes to it.

    This is modeled after a Python descriptor, similar to the built in `property`, see:

    - https://docs.python.org/3/reference/datamodel.html#implementing-descriptors
    """
    __slots__ = ('__name', '__nc_type')

    __name: str           # field name; also the storage key (utf-8 encoded)
    __nc_type: NCType[T]  # (de)serializer for the stored value

    @classmethod
    def _from_name_and_type(cls, name: str, type_: type[T], /, *, type_map: Field.TypeMap) -> Self:
        field = cls()
        field.__name = name
        field.__nc_type = NCType.from_type(type_, type_map=type_map.to_nc_type_map())
        return field

    def __storage_key(self) -> bytes:
        return self.__name.encode('utf-8')

    def __set__(self, instance: Blueprint, obj: T) -> None:
        # write-through: persist first, then refresh the per-instance cache if one is enabled
        instance.syscall.__storage__.put_obj(self.__storage_key(), self.__nc_type, obj)
        cache = instance.syscall.__cache__
        if cache is not None:
            cache[self.__name] = obj

    def __get__(self, instance: Blueprint, owner: object | None = None) -> T:
        cache = instance.syscall.__cache__
        if cache is not None and self.__name in cache:
            return cache[self.__name]

        # keep the try-body minimal: only the storage read can legitimately raise KeyError here
        try:
            obj = instance.syscall.__storage__.get_obj(self.__storage_key(), self.__nc_type)
        except KeyError:
            # translate to AttributeError so missing fields behave like missing attributes
            raise AttributeError(f'Contract has no attribute \'{self.__name}\'')
        if cache is not None:
            cache[self.__name] = obj
        return obj

    def __delete__(self, instance: Blueprint) -> None:
        # NOTE(review): the cache (if any) is not invalidated on delete — confirm callers never
        # read the field again after `del`, or that the storage layer handles it
        instance.syscall.__storage__.del_obj(self.__storage_key())


from collections.abc import Iterable, Iterator
from typing import Any, TypeVar, get_args, get_origin

from typing_extensions import Self, override

from hathor.nanocontracts.fields.container_field import KEY_SEPARATOR, ContainerField, StorageContainer
from hathor.nanocontracts.fields.field import Field
from hathor.nanocontracts.nc_types import NCType, VarUint32NCType
from hathor.nanocontracts.nc_types.utils import is_origin_hashable
from hathor.nanocontracts.storage import NCContractStorage
from hathor.util import not_none

T = TypeVar('T')
_S = TypeVar('_S')
# sentinel for "argument not provided" defaults (kept for parity with the stdlib-set API surface)
_NOT_PROVIDED = object()
# db-key suffix under which the element count is persisted, and the NCType used to (de)serialize it
_LENGTH_KEY: str = '__length__'
_LENGTH_NC_TYPE = VarUint32NCType()


class SetStorageContainer(StorageContainer[set[T]]):
    """A set-like object backed by a contract storage.

    Each element is stored under a db key derived from the field name plus the serialized element,
    and the element count is kept under a separate `__length__` key so `len()` is O(1).

    # from https://github.com/python/typeshed/blob/main/stdlib/collections/__init__.pyi
    # from https://github.com/python/typeshed/blob/main/stdlib/typing.pyi
    """

    __slots__ = ('__storage', '__name', '__value', '__length_key')
    __storage: NCContractStorage  # backing contract storage
    __name: str                   # field name, used as the db-key prefix
    __value: NCType[T]            # (de)serializer for elements
    __length_key: bytes           # db key where the element count is stored

    # XXX: what to do with this:
    # __hash__: ClassVar[None]  # type: ignore[assignment]

    def __init__(self, storage: NCContractStorage, name: str, value: NCType[T]) -> None:
        self.__storage = storage
        self.__name = name
        self.__value = value
        self.__length_key = f'{name}{KEY_SEPARATOR}{_LENGTH_KEY}'.encode()

    # Methods needed by StorageContainer:

    @override
    @classmethod
    def __check_name_and_type__(cls, name: str, type_: type[set[T]]) -> None:
        """Validate that `name`/`type_` describe a usable set field, raising TypeError otherwise."""
        if not name.isidentifier():
            raise TypeError('field name must be a valid identifier')
        origin_type: type[set[T]] = not_none(get_origin(type_))
        if not issubclass(origin_type, set):
            raise TypeError('expected set type')
        args = get_args(type_)
        # a set signature needs exactly one member type
        if len(args) != 1:
            raise TypeError(f'expected {type_.__name__}[<member-type>]')
        item_type, = args
        if not is_origin_hashable(item_type):
            raise TypeError(f'{item_type} is not hashable')

    @override
    @classmethod
    def __from_name_and_type__(
        cls,
        storage: NCContractStorage,
        name: str,
        type_: type[set[T]],
        /,
        *,
        type_map: Field.TypeMap,
    ) -> Self:
        """Build a container from the declared `set[T]` annotation."""
        item_type, = get_args(type_)
        item_nc_type = NCType.from_type(item_type, type_map=type_map.to_nc_type_map())
        assert item_nc_type.is_hashable(), 'hashable "types" must produce hashable "values"'
        return cls(storage, name, item_nc_type)

    def __to_db_key(self, elem: T) -> bytes:
        # We don't need to explicitly hash the value here, because the trie already does it internally.
        return f'{self.__name}{KEY_SEPARATOR}'.encode() + self.__value.to_bytes(elem)

    def __get_length(self) -> int:
        return self.__storage.get_obj(self.__length_key, _LENGTH_NC_TYPE, default=0)

    def __increase_length(self) -> None:
        self.__storage.put_obj(self.__length_key, _LENGTH_NC_TYPE, self.__get_length() + 1)

    def __decrease_length(self) -> None:
        length = self.__get_length()
        assert length > 0
        self.__storage.put_obj(self.__length_key, _LENGTH_NC_TYPE, length - 1)

    # required by Iterable

    def __iter__(self) -> Iterator[T]:
        raise NotImplementedError

    # required by Collection

    def __len__(self) -> int:
        return self.__get_length()

    # required by AbstractSet

    def __contains__(self, elem: T, /) -> bool:
        key = self.__to_db_key(elem)
        return self.__storage.has_obj(key)

    # provided by Set (currently not implemented):
    #
    # def _hash(self) -> int: ...
    # def __le__(self, other: set[Any]) -> bool: ...
    # def __lt__(self, other: set[Any]) -> bool: ...
    # def __gt__(self, other: set[Any]) -> bool: ...
    # def __ge__(self, other: set[Any]) -> bool: ...
    # def __and__(self, other: set[Any]) -> set[T]: ...
    # def __or__(self, other: set[T]) -> set[T]: ...
    # def __sub__(self, other: set[Any]) -> set[T]: ...
    # def __xor__(self, other: set[T]) -> set[T]: ...
    # def __eq__(self, other: object) -> bool: ...
    # def isdisjoint(self, other: Iterable[Any]) -> bool: ...

    def isdisjoint(self, other: Iterable[Any]) -> bool:
        # disjoint iff no element of `other` is stored here
        return len(self.intersection(other)) == 0

    # required by MutableSet

    def add(self, elem: T, /) -> None:
        key = self.__to_db_key(elem)
        if self.__storage.has_obj(key):
            # already present: sets ignore duplicate adds, and the length must not change
            return
        self.__storage.put_obj(key, self.__value, elem)
        self.__increase_length()

    def discard(self, elem: T, /) -> None:
        key = self.__to_db_key(elem)
        if not self.__storage.has_obj(key):
            # missing element: discard is a no-op, matching builtin set semantics
            return
        self.__storage.del_obj(key)
        self.__decrease_length()

    # provided by MutableSet (currently not implemented):
    #
    # def clear(self) -> None: ...
    # def pop(self) -> T: ...
    # def remove(self, value: T) -> None: ...
    # def __ior__(self, it: set[T]) -> Self: ...  # type: ignore[override,misc]
    # def __iand__(self, it: set[Any]) -> Self: ...
    # def __ixor__(self, it: set[T]) -> Self: ...  # type: ignore[override,misc]
    # def __isub__(self, it: set[Any]) -> Self: ...

    # of which we override:

    def remove(self, elem: T, /) -> None:
        key = self.__to_db_key(elem)
        if not self.__storage.has_obj(key):
            # NOTE(review): builtin set raises KeyError(elem); this raises a bare KeyError
            raise KeyError
        self.__storage.del_obj(key)
        self.__decrease_length()

    # Additional methods to behave like a set
    # see https://github.com/python/typeshed/blob/main/stdlib/builtins.pyi#L1168

    def copy(self) -> set[T]:
        raise NotImplementedError

    def difference(self, *s: Iterable[Any]) -> set[T]:
        raise NotImplementedError

    def difference_update(self, *others: Iterable[Any]) -> None:
        for other in others:
            for elem in other:
                self.discard(elem)

    # def intersection(self, *s: Iterable[Any]) -> set[T]: ...
    def intersection(self, other: Iterable[Any]) -> set[T]:
        # materializes the (finite) intersection by probing storage for each candidate
        return {elem for elem in other if elem in self}

    def intersection_update(self, *s: Iterable[Any]) -> None:
        raise NotImplementedError

    def issubset(self, s: Iterable[Any], /) -> bool:
        raise NotImplementedError

    def issuperset(self, other: Iterable[Any]) -> bool:
        return all(elem in self for elem in other)

    def symmetric_difference(self, s: Iterable[T], /) -> set[T]:
        raise NotImplementedError

    def symmetric_difference_update(self, s: Iterable[T], /) -> None:
        raise NotImplementedError

    def union(self, *s: Iterable[_S]) -> set[T | _S]:
        raise NotImplementedError

    def update(self, *others: Iterable[T]) -> None:
        for other in others:
            for elem in other:
                self.add(elem)


SetField = ContainerField[SetStorageContainer[T]]
+ +from collections.abc import Mapping +from typing import TYPE_CHECKING, TypeAlias + +if TYPE_CHECKING: + from hathor.nanocontracts.fields import Field + + +TypeToFieldMap: TypeAlias = Mapping[type, type['Field']] diff --git a/hathor/nanocontracts/metered_exec.py b/hathor/nanocontracts/metered_exec.py new file mode 100644 index 000000000..0097c30dd --- /dev/null +++ b/hathor/nanocontracts/metered_exec.py @@ -0,0 +1,100 @@ +# Copyright 2024 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import annotations + +from typing import Any, Callable, ParamSpec, TypeVar, cast + +from structlog import get_logger + +from hathor.nanocontracts.custom_builtins import EXEC_BUILTINS +from hathor.nanocontracts.on_chain_blueprint import PYTHON_CODE_COMPAT_VERSION + +logger = get_logger() + +_T = TypeVar('_T') +_P = ParamSpec('_P') + + +# https://docs.python.org/3/library/sys.html#sys.settrace +# 110 opcodes +# [x for x in dis.opname if not x.startswith('<')] +# TODO: cost for each opcode +FUEL_COST_MAP = [1] * 256 + + +class OutOfFuelError(RuntimeError): + pass + + +class OutOfMemoryError(MemoryError): + pass + + +class MeteredExecutor: + __slots__ = ('_fuel', '_memory_limit', '_debug') + + def __init__(self, fuel: int, memory_limit: int) -> None: + self._fuel = fuel + self._memory_limit = memory_limit + self._debug = False + + def get_fuel(self) -> int: + return self._fuel + + def get_memory_limit(self) -> int: + return self._memory_limit + + def exec(self, source: str, /) -> dict[str, Any]: + """ This is equivalent to `exec(source)` but with execution metering and memory limiting. + """ + env: dict[str, object] = { + '__builtins__': EXEC_BUILTINS, + } + # XXX: calling compile now makes the exec step consume less fuel + code = compile( + source=source, + filename='', + mode='exec', + flags=0, + dont_inherit=True, + optimize=0, + _feature_version=PYTHON_CODE_COMPAT_VERSION[1], + ) + # XXX: SECURITY: `code` and `env` need the proper restrictions by this point + exec(code, env) + del env['__builtins__'] + return env + + def call(self, func: Callable[_P, _T], /, *, args: _P.args) -> _T: + """ This is equivalent to `func(*args, **kwargs)` but with execution metering and memory limiting. 
+ """ + env: dict[str, object] = { + '__builtins__': EXEC_BUILTINS, + '__func__': func, + '__args__': args, + '__result__': None, + } + # XXX: calling compile now makes the exec step consume less fuel + code = compile( + source='__result__ = __func__(*__args__)', + filename='', + mode='exec', + flags=0, + dont_inherit=True, + optimize=0, + _feature_version=PYTHON_CODE_COMPAT_VERSION[1], + ) + exec(code, env) + return cast(_T, env['__result__']) diff --git a/hathor/nanocontracts/method.py b/hathor/nanocontracts/method.py new file mode 100644 index 000000000..3ceb63ece --- /dev/null +++ b/hathor/nanocontracts/method.py @@ -0,0 +1,347 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import annotations + +from collections.abc import Callable, Iterable +from inspect import Parameter, Signature, _empty as EMPTY, signature +from types import FunctionType, MethodType +from typing import Any, TypeVar + +from typing_extensions import Self, assert_never, override + +from hathor.nanocontracts import Context +from hathor.nanocontracts.exception import NCFail, NCSerializationArgTooLong, NCSerializationError +from hathor.nanocontracts.nc_types import ( + NCType, + VarUint32NCType, + make_nc_type_for_arg_type, + make_nc_type_for_return_type, +) +from hathor.nanocontracts.utils import is_nc_public_method +from hathor.serialization import Deserializer, SerializationError, Serializer +from hathor.serialization.adapters import MaxBytesExceededError + +_num_args_nc_type = VarUint32NCType() +T = TypeVar('T') + +MAX_BYTES_SERIALIZED_ARG: int = 1000 + + +def _deserialize_map_exception(nc_type: NCType[T], data: bytes) -> T: + """ Internal handy method to deserialize `bytes` to `T` while mapping the exceptions.""" + try: + deserializer = Deserializer.build_bytes_deserializer(data) + value = nc_type.deserialize(deserializer) + deserializer.finalize() + return value + except MaxBytesExceededError as e: + raise NCSerializationArgTooLong from e + except SerializationError as e: + raise NCSerializationError from e + except NCFail: + raise + except Exception as e: + raise NCFail from e + + +def _serialize_map_exception(nc_type: NCType[T], value: T) -> bytes: + """ Internal handy method to serialize `T` to `bytes` while mapping the exceptions.""" + try: + serializer = Serializer.build_bytes_serializer() + nc_type.serialize(serializer, value) + return bytes(serializer.finalize()) + except MaxBytesExceededError as e: + raise NCSerializationArgTooLong from e + except SerializationError as e: + raise NCSerializationError from e + except NCFail: + raise + except Exception as e: + raise NCFail from e + + +class _ArgsNCType(NCType): + """ Inner implementation 
of a callable "args" using the NCType model. + """ + + _args: tuple[NCType, ...] + _max_bytes: int + + def __init__(self, args_nc_types: Iterable[NCType], max_bytes: int) -> None: + self._args = tuple(args_nc_types) + self._max_bytes = max_bytes + + @override + def _check_value(self, value: Any, /, *, deep: bool) -> None: + # XXX: we take either a tuple or a list as input + if not isinstance(value, (tuple, list)): + raise TypeError('expected tuple or list') + if len(value) > len(self._args): + raise TypeError('too many arguments') + if deep: + for i, arg_nc_type in zip(value, self._args): + arg_nc_type._check_value(i, deep=deep) + + @override + def _serialize(self, serializer: Serializer, args: tuple[Any, ...] | list[Any], /) -> None: + with serializer.with_max_bytes(self._max_bytes) as serializer: + num_args = len(args) + if num_args > len(self._args): + raise TypeError('too many arguments') + # XXX: default arguments are currently not supported, thus we reject too few arguments too + if num_args < len(self._args): + raise TypeError('too few arguments') + _num_args_nc_type.serialize(serializer, num_args) + for value, arg in zip(self._args, args): + value.serialize(serializer, arg) + + @override + def _deserialize(self, deserializer: Deserializer, /) -> tuple[Any, ...]: + with deserializer.with_max_bytes(self._max_bytes) as deserializer: + # TODO: normalize exceptions + num_args = _num_args_nc_type.deserialize(deserializer) + if num_args > len(self._args): + raise TypeError('too many arguments') + # XXX: default arguments are currently not supported, thus we reject too few arguments too + if num_args < len(self._args): + raise TypeError('too few arguments') + args = [] + for value, _ in zip(self._args, range(num_args)): + args.append(value.deserialize(deserializer)) + return tuple(args) + + @override + def _json_to_value(self, json_value: NCType.Json, /) -> tuple[Any, ...]: + if not isinstance(json_value, list): + raise ValueError('expected list') + return 
tuple(v.json_to_value(i) for (i, v) in zip(json_value, self._args)) + + @override + def _value_to_json(self, value: tuple[Any, ...], /) -> NCType.Json: + return [v.value_to_json(i) for (i, v) in zip(value, self._args)] + + +class ArgsOnly: + """ This class is used to parse only arguments of a call, when all that is provided is a list of argument types. + + Its primary use is for implementing `NCRawArgs.try_parse_as`. + """ + args: _ArgsNCType + + def __init__(self, args_nc_type: _ArgsNCType) -> None: + """Do not build directly, use `ArgsOnly.from_arg_types`""" + self.args = args_nc_type + + @classmethod + def from_arg_types(cls, arg_types: tuple[type, ...]) -> Self: + args_nc_types: list[NCType] = [] + for arg_type in arg_types: + args_nc_types.append(make_nc_type_for_arg_type(arg_type)) + + return cls(_ArgsNCType(args_nc_types, max_bytes=MAX_BYTES_SERIALIZED_ARG)) + + def serialize_args_bytes(self, args: tuple[Any, ...] | list[Any]) -> bytes: + """ Shortcut to serialize args directly to a bytes instead of using a serializer. + """ + return _serialize_map_exception(self.args, args) + + def deserialize_args_bytes(self, data: bytes) -> tuple[Any, ...]: + """ Shortcut to deserialize args directly from bytes instead of using a deserializer. + """ + return _deserialize_map_exception(self.args, data) + + +class ReturnOnly: + """ + This class is used to parse only the return of a method. + + Its primary use is for validating the fallback method. 
+ """ + return_nc_type: NCType + + def __init__(self, return_nc_type: NCType) -> None: + self.return_nc_type = return_nc_type + + @classmethod + def from_callable(cls, method: Callable) -> Self: + method_signature = _get_method_signature(method) + nc_type = make_nc_type_for_return_type(method_signature.return_annotation) + return cls(nc_type) + + def serialize_return_bytes(self, return_value: Any) -> bytes: + """Shortcut to serialize a return value directly to bytes instead of using a serializer.""" + return _serialize_map_exception(self.return_nc_type, return_value) + + def deserialize_return_bytes(self, data: bytes) -> Any: + """Shortcut to deserialize a return value directly from bytes instead of using a deserializer.""" + return _deserialize_map_exception(self.return_nc_type, data) + + +# XXX: currently the relationship between the method's signature's types and the `NCType`s type's cannot be described +# with Python/mypy's typing system +class Method: + """ This class abstracts a method's type signature in relation similarly to how NCType and Field abstract a loose + "value" or a classe's "field". + + This abstraction is used to (de)serialize the arguments of a method call, and (de)serialize the result of a method + call. It may also be used to transmit values when a nano-method calls another nano-method. + + For arguments, `make_nc_type_for_arg_type` is used, which tends to preserve original types as much as possible, but + for return types `make_nc_type_for_return_type` is used, which supports `None`. + """ + name: str + arg_names: tuple[str, ...] 
+ args: _ArgsNCType + return_: NCType + + def __init__( + self, + *, + name: str, + arg_names: Iterable[str], + args_nc_type: _ArgsNCType, + return_nc_type: NCType, + ) -> None: + """Do not build directly, use `Method.from_callable`""" + self.name = name + self.arg_names = tuple(arg_names) + self.args = args_nc_type + self.return_ = return_nc_type + + @classmethod + def from_callable(cls, method: Callable) -> Self: + method_signature = _get_method_signature(method) + + # XXX: bound methods don't have the self argument + is_bound_method: bool + + match method: + case MethodType(): + is_bound_method = True + case FunctionType(): + is_bound_method = False + case _: + raise TypeError(f'{method!r} is neither a function or a bound method') + + for param in method_signature.parameters.values(): + if isinstance(param.annotation, str): + raise TypeError('string annotations (including `from __future__ import annotations`), ' + 'are not supported') + + arg_names = [] + args_nc_types = [] + iter_params = iter(method_signature.parameters.values()) + + # XXX: bound methods don't expose the self argument + if not is_bound_method: + self_param = next(iter_params) + if self_param.name != 'self': + # XXX: self_param is not technically required to be named 'self', it can be named anything, but it + # should at least be a warning because it's possible the author forgot the 'self' argument + raise TypeError('first argument should be self') + + if is_nc_public_method(method): + ctx_param = next(iter_params) + if ctx_param.annotation is not Context: + raise TypeError('context argument must be annotated as `ctx: Context`') + + for param in iter_params: + match param.kind: + case Parameter.POSITIONAL_ONLY: # these are arguments before / + # we accept these + pass + case Parameter.POSITIONAL_OR_KEYWORD: # there are normal arguments + # we accept these + pass + case Parameter.VAR_POSITIONAL: # these are *args kind of arguments + # XXX: we can technically support this, since these can be 
annotated + raise TypeError('variable *args arguments are not supported') + case Parameter.KEYWORD_ONLY: # these are arguments after * or *args, which are keyword-only + raise TypeError('keyword-only arguments are not supported') + case Parameter.VAR_KEYWORD: # these are **kwargs arguments + raise TypeError('variable **kwargs arguments are not supported') + case _ as impossible_kind: # no other type of argument exist + assert_never(impossible_kind) + # XXX: this can (and probably will) be implemented in the future + if param.default is not EMPTY: + raise TypeError('default values are not supported') + arg_names.append(param.name) + args_nc_types.append(make_nc_type_for_arg_type(param.annotation)) + + return cls( + name=method.__name__, + arg_names=arg_names, + args_nc_type=_ArgsNCType(args_nc_types, max_bytes=MAX_BYTES_SERIALIZED_ARG), + return_nc_type=make_nc_type_for_return_type(method_signature.return_annotation), + ) + + def serialize_args_bytes(self, args: tuple[Any, ...] | list[Any], kwargs: dict[str, Any] | None = None) -> bytes: + """ Shortcut to serialize args directly to a bytes instead of using a serializer. 
+ """ + if len(args) > len(self.arg_names): + raise NCFail('too many arguments') + + merged: dict[str, Any] = {} + for index, arg in enumerate(args): + name = self.arg_names[index] + merged[name] = arg + + kwargs = kwargs or {} + for name, arg in kwargs.items(): + if name not in self.arg_names: + raise NCFail(f"{self.name}() got an unexpected keyword argument '{name}'") + if name in merged: + raise NCFail(f"{self.name}() got multiple values for argument '{name}'") + merged[name] = arg + + ordered_args = [] + for name in self.arg_names: + if name not in merged: + raise NCFail(f"{self.name}() missing required argument: '{name}'") + ordered_args.append(merged[name]) + + return _serialize_map_exception(self.args, tuple(ordered_args)) + + def deserialize_args_bytes(self, data: bytes) -> tuple[Any, ...]: + """ Shortcut to deserialize args directly from bytes instead of using a deserializer. + """ + return _deserialize_map_exception(self.args, data) + + def serialize_return_bytes(self, return_value: Any) -> bytes: + """ Shortcut to serialize a return value directly to a bytes instead of using a serializer. + """ + return _serialize_map_exception(self.return_, return_value) + + def deserialize_return_bytes(self, data: bytes) -> Any: + """ Shortcut to deserialize a return value directly from bytes instead of using a deserializer. 
+ """ + return _deserialize_map_exception(self.return_, data) + + +def _get_method_signature(method: Callable) -> Signature: + if not callable(method): + raise TypeError(f'{method!r} is not a callable object') + + # XXX: explicit all arguments to explain the choices, even if default + return signature( + method, + follow_wrapped=True, # we're interested in the implementation's signature, so we follow wrappers + globals=None, # don't expose any global + locals=None, # don't expose any local + # XXX: do not evaluate strings, this means `from __future__ import annotations` is not supported, ideally + # we should support it because it's very convenient, but it must be done with care, otherwise we could + # run into cases that do `def foo(self, i: '2**100**100') -> None`, which is syntactically legal + eval_str=False, + ) diff --git a/hathor/nanocontracts/nc_exec_logs.py b/hathor/nanocontracts/nc_exec_logs.py new file mode 100644 index 000000000..dc98c9876 --- /dev/null +++ b/hathor/nanocontracts/nc_exec_logs.py @@ -0,0 +1,366 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import annotations + +import json +import os.path +from collections import defaultdict +from dataclasses import dataclass, field +from enum import IntEnum, StrEnum, auto, unique +from pathlib import Path +from typing import TYPE_CHECKING, Any, Literal, assert_never + +from pydantic import Field, validator +from typing_extensions import override + +from hathor.nanocontracts import NCFail +from hathor.nanocontracts.runner import CallInfo, CallRecord, CallType +from hathor.nanocontracts.types import ContractId +from hathor.reactor import ReactorProtocol +from hathor.transaction import Transaction +from hathor.types import VertexId +from hathor.utils.pydantic import BaseModel + +if TYPE_CHECKING: + from hathor.conf.settings import HathorSettings + +MAX_EVENT_SIZE: int = 100 * 1024 # 100KiB + + +@unique +class NCLogConfig(StrEnum): + # Don't save any nano contract logs. + NONE = auto() + + # Save logs for all nano contracts. + ALL = auto() + + # Only save logs for nano contracts that failed. + FAILED = auto() + + # Only save logs for nano contracts that failed with an unhandled exception (that is, not NCFail). 
+ FAILED_UNHANDLED = auto() + + +@unique +class NCLogLevel(IntEnum): + """The log level of NC execution logs.""" + DEBUG = 0 + INFO = 1 + WARN = 2 + ERROR = 3 + + @staticmethod + def from_str(value: str) -> NCLogLevel | None: + """Create a NCLogLevel from a string, or return None if it's invalid.""" + try: + return NCLogLevel[value] + except KeyError: + return None + + +class _BaseNCEntry(BaseModel): + type: str + level: NCLogLevel + timestamp: float + + @override + def dict(self, *args: Any, **kwargs: Any) -> dict[str, Any]: + json_dict = super().dict(*args, **kwargs) + json_dict['level'] = self.level.name + return json_dict + + @validator('level', pre=True) + def _parse_level(cls, level: NCLogLevel | int | str) -> NCLogLevel: + if isinstance(level, NCLogLevel): + return level + if isinstance(level, int): + return NCLogLevel(level) + if isinstance(level, str): + return NCLogLevel[level] + raise TypeError(f'invalid level type: {type(level)}') + + +class NCLogEntry(_BaseNCEntry): + """An entry representing a single log in a NC execution.""" + type: Literal['LOG'] = Field(const=True, default='LOG') + message: str + key_values: dict[str, str] = Field(default_factory=dict) + + +class NCCallBeginEntry(_BaseNCEntry): + """An entry representing a single method call beginning in a NC execution.""" + type: Literal['CALL_BEGIN'] = Field(const=True, default='CALL_BEGIN') + level: NCLogLevel = Field(const=True, default=NCLogLevel.DEBUG) + nc_id: VertexId + call_type: CallType + method_name: str + str_args: str = '()' + actions: list[dict[str, Any]] | None + + @staticmethod + def from_call_record(call_record: CallRecord, *, timestamp: float) -> NCCallBeginEntry: + """Create a NCCallEntry from a CallRecord.""" + actions = None + if call_record.ctx is not None: + ctx_json = call_record.ctx.to_json() + actions = ctx_json['actions'] + + return NCCallBeginEntry( + nc_id=call_record.contract_id, + call_type=call_record.type, + method_name=call_record.method_name, + 
str_args=str(call_record.args), + timestamp=timestamp, + actions=actions + ) + + @override + def dict(self, *args: Any, **kwargs: Any) -> dict[str, Any]: + json_dict = super().dict(*args, **kwargs) + json_dict['nc_id'] = self.nc_id.hex() + return json_dict + + @validator('nc_id', pre=True) + def _parse_nc_id(cls, vertex_id: VertexId | str) -> VertexId: + if isinstance(vertex_id, VertexId): + return vertex_id + if isinstance(vertex_id, str): + return bytes.fromhex(vertex_id) + raise TypeError(f'invalid vertex_id type: {type(vertex_id)}') + + +class NCCallEndEntry(_BaseNCEntry): + """An entry representing a single method call ending in a NC execution.""" + type: Literal['CALL_END'] = Field(const=True, default='CALL_END') + level: NCLogLevel = Field(const=True, default=NCLogLevel.DEBUG) + + +class NCExecEntry(BaseModel): + """ + An entry representing the whole execution of a NC. + It may contain several calls across different NCs, with logs in order. + """ + logs: list[NCCallBeginEntry | NCLogEntry | NCCallEndEntry] + error_traceback: str | None = None + + @staticmethod + def from_call_info(call_info: CallInfo, error_tb: str | None) -> NCExecEntry: + """Create a NCExecEntry from a CallInfo and an optional traceback.""" + return NCExecEntry( + logs=call_info.nc_logger.__entries__, + error_traceback=error_tb, + ) + + def filter(self, log_level: NCLogLevel) -> NCExecEntry: + """Create a new NCExecEntry while keeping logs with the provided log level or higher.""" + return self.copy( + update=dict( + logs=[log for log in self.logs if log.level >= log_level], + ), + ) + + +class NCExecEntries(BaseModel): + """ + A mapping of block IDs to lists of NC executions. + If there are reorgs, a single block can execute the same NC more than once. 
+ """ + entries: dict[VertexId, list[NCExecEntry]] + + @staticmethod + def from_json(json_dict: dict[str, Any]) -> NCExecEntries: + entries = { + bytes.fromhex(block_id_hex): [NCExecEntry.parse_obj(entry) for entry in entries] + for block_id_hex, entries in json_dict.items() + } + return NCExecEntries(entries=entries) + + @override + def dict(self, *args: Any, **kwargs: Any) -> dict[str, Any]: + return { + block_id.hex(): [entry.dict(*args, **kwargs) for entry in block_entries] + for block_id, block_entries in self.entries.items() + } + + +@dataclass(slots=True, frozen=True, kw_only=True) +class NCEvent: + nc_id: ContractId + data: bytes + + +# TODO: Rename to something else now that it has events? move events out of it? +@dataclass(slots=True) +class NCLogger: + """ + A dataclass that provides instrumentation-related features, including logging-equivalent functionality + saving log entries in memory, and emission of events. + To be used inside Blueprints. + """ + __reactor__: ReactorProtocol + __nc_id__: ContractId + __entries__: list[NCCallBeginEntry | NCLogEntry | NCCallEndEntry] = field(default_factory=list) + __events__: list[NCEvent] = field(default_factory=list) + + def debug(self, message: str, **kwargs: Any) -> None: + """Create a new DEBUG log entry.""" + self.__log__(NCLogLevel.DEBUG, message, **kwargs) + + def info(self, message: str, **kwargs: Any) -> None: + """Create a new INFO log entry.""" + self.__log__(NCLogLevel.INFO, message, **kwargs) + + def warn(self, message: str, **kwargs: Any) -> None: + """Create a new WARN log entry.""" + self.__log__(NCLogLevel.WARN, message, **kwargs) + + def error(self, message: str, **kwargs: Any) -> None: + """Create a new ERROR log entry.""" + self.__log__(NCLogLevel.ERROR, message, **kwargs) + + def __emit_event__(self, data: bytes) -> None: + """Emit a custom event from a Nano Contract.""" + if len(data) > MAX_EVENT_SIZE: + raise ValueError(f'event data cannot be larger than {MAX_EVENT_SIZE} bytes, is 
{len(data)}') + self.__events__.append(NCEvent(nc_id=self.__nc_id__, data=data)) + + def __log__(self, level: NCLogLevel, message: str, **kwargs: Any) -> None: + """Create a new log entry.""" + key_values = {k: v.hex() if isinstance(v, bytes) else str(v) for k, v in kwargs.items()} + entry = NCLogEntry(level=level, message=message, key_values=key_values, timestamp=self.__reactor__.seconds()) + self.__entries__.append(entry) + + def __log_call_begin__(self, call_record: CallRecord) -> None: + """Log the beginning of a call.""" + self.__entries__.append(NCCallBeginEntry.from_call_record(call_record, timestamp=self.__reactor__.seconds())) + + def __log_call_end__(self) -> None: + """Log the end of a call.""" + self.__entries__.append(NCCallEndEntry(timestamp=self.__reactor__.seconds())) + + +NC_EXEC_LOGS_DIR = 'nc_exec_logs' + + +class NCLogStorage: + """ + A storage to persist NC execution logs in the file system. + """ + __slots__ = ('settings', '_path', '_config') + + def __init__(self, *, settings: HathorSettings, path: str, config: NCLogConfig) -> None: + self.settings = settings + self._path = Path(path).joinpath(NC_EXEC_LOGS_DIR) + self._config = config + + def save_logs(self, tx: Transaction, call_info: CallInfo, exception_and_tb: tuple[NCFail, str] | None) -> None: + """Persist new NC execution logs.""" + assert tx.is_nano_contract() + meta = tx.get_metadata() + assert meta.first_block is not None, 'nc exec logs can only be saved when the nc is confirmed' + exception, tb = exception_and_tb if exception_and_tb is not None else (None, None) + + match self._config: + case NCLogConfig.NONE: + # don't save any logs + return + case NCLogConfig.ALL: + # save all logs + pass + case NCLogConfig.FAILED: + if exception is None: + # don't save when there's no exception + return + case NCLogConfig.FAILED_UNHANDLED: + if exception is None: + # don't save when there's no exception + return + assert isinstance(exception, NCFail) + if not exception.__cause__ or 
isinstance(exception.__cause__, NCFail): + # don't save when it's a simple NCFail or caused by a NCFail + return + case _: + assert_never(self._config) + + new_entry = NCExecEntry.from_call_info(call_info, tb) + new_line_dict = {meta.first_block.hex(): new_entry.dict()} + path = self._get_file_path(tx.hash) + + with path.open(mode='a') as f: + f.write(json.dumps(new_line_dict) + '\n') + + def _get_file_path(self, vertex_id: VertexId) -> Path: + dir_path = self._path.joinpath(vertex_id[0:1].hex()) + os.makedirs(dir_path, exist_ok=True) + return dir_path.joinpath(f'{vertex_id.hex()}.jsonl') + + def _get_entries(self, nano_contract_id: VertexId, *, block_id: VertexId | None) -> NCExecEntries | None: + """Internal method to get NCExecEntries from the file system, or None if it doesn't exist.""" + path = self._get_file_path(nano_contract_id) + if not os.path.isfile(path): + return None + + all_execs = defaultdict(list) + with path.open(mode='r') as f: + for line in f: + if not line: + break + line_dict = json.loads(line) + keys = list(line_dict.keys()) + assert len(line_dict.keys()) == 1 + block_id_key = keys[0] + if block_id is None or block_id_key == block_id.hex(): + all_execs[block_id_key].append(line_dict[block_id_key]) + + return NCExecEntries.from_json(all_execs) + + def get_logs( + self, + nano_contract_id: VertexId, + *, + log_level: NCLogLevel = NCLogLevel.DEBUG, + block_id: VertexId | None = None, + ) -> NCExecEntries | None: + """ + Return NC execution logs to the provided NC ID. + + Args: + nano_contract_id: the id of the NC to be retrieved. + log_level: the minimum log level of desired logs. + block_id: optional block ID of the block that executed the NC. + + Returns: + A dict of block IDs to lists of NCExecEntry. 
+ """ + logs = self._get_entries(nano_contract_id, block_id=block_id) + if logs is None: + return None + entries = { + exec_block_id: [nc_exec_entry.filter(log_level) for nc_exec_entry in entries] + for exec_block_id, entries in logs.entries.items() + } + return NCExecEntries(entries=entries) + + def get_json_logs( + self, + nano_contract_id: VertexId, + *, + log_level: NCLogLevel = NCLogLevel.DEBUG, + block_id: VertexId | None = None, + ) -> dict[str, Any] | None: + """Return NC execution logs to the provided NC ID as json.""" + logs = self.get_logs(nano_contract_id, log_level=log_level, block_id=block_id) + return None if logs is None else logs.dict() diff --git a/hathor/nanocontracts/nc_types/__init__.py b/hathor/nanocontracts/nc_types/__init__.py new file mode 100644 index 000000000..f416d8321 --- /dev/null +++ b/hathor/nanocontracts/nc_types/__init__.py @@ -0,0 +1,178 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from collections import OrderedDict, deque +from types import NoneType, UnionType +from typing import NamedTuple, TypeVar, Union + +from hathor.nanocontracts.nc_types.address_nc_type import AddressNCType +from hathor.nanocontracts.nc_types.bool_nc_type import BoolNCType +from hathor.nanocontracts.nc_types.bytes_nc_type import BytesLikeNCType, BytesNCType +from hathor.nanocontracts.nc_types.collection_nc_type import DequeNCType, FrozenSetNCType, ListNCType, SetNCType +from hathor.nanocontracts.nc_types.dataclass_nc_type import DataclassNCType +from hathor.nanocontracts.nc_types.fixed_size_bytes_nc_type import Bytes32NCType +from hathor.nanocontracts.nc_types.map_nc_type import DictNCType +from hathor.nanocontracts.nc_types.namedtuple_nc_type import NamedTupleNCType +from hathor.nanocontracts.nc_types.nc_type import NCType +from hathor.nanocontracts.nc_types.null_nc_type import NullNCType +from hathor.nanocontracts.nc_types.optional_nc_type import OptionalNCType +from hathor.nanocontracts.nc_types.signed_data_nc_type import SignedDataNCType +from hathor.nanocontracts.nc_types.sized_int_nc_type import Int32NCType, Uint32NCType +from hathor.nanocontracts.nc_types.str_nc_type import StrNCType +from hathor.nanocontracts.nc_types.token_uid_nc_type import TokenUidNCType +from hathor.nanocontracts.nc_types.tuple_nc_type import TupleNCType +from hathor.nanocontracts.nc_types.utils import TypeAliasMap, TypeToNCTypeMap +from hathor.nanocontracts.nc_types.varint_nc_type import VarInt32NCType, VarUint32NCType +from hathor.nanocontracts.types import ( + Address, + Amount, + BlueprintId, + ContractId, + SignedData, + Timestamp, + TokenUid, + TxOutputScript, + VertexId, +) + +__all__ = [ + 'ARG_TYPE_TO_NC_TYPE_MAP', + 'DEFAULT_TYPE_ALIAS_MAP', + 'ESSENTIAL_TYPE_ALIAS_MAP', + 'FIELD_TYPE_TO_NC_TYPE_MAP', + 'RETURN_TYPE_TO_NC_TYPE_MAP', + 'AddressNCType', + 'BoolNCType', + 'BytesLikeNCType', + 'BytesNCType', + 'DataclassNCType', + 'DequeNCType', + 'DictNCType', + 
'FrozenSetNCType', + 'Int32NCType', + 'ListNCType', + 'NCType', + 'NamedTupleNCType', + 'NullNCType', + 'OptionalNCType', + 'SetNCType', + 'SignedDataNCType', + 'StrNCType', + 'TupleNCType', + 'TypeAliasMap', + 'TypeToNCTypeMap', + 'Uint32NCType', + 'VarInt32NCType', + 'VarUint32NCType', + 'make_nc_type_for_field_type', + 'make_nc_type_for_arg_type', + 'make_nc_type_for_return_type', +] + +T = TypeVar('T') + +# this is the minimum type-alias-map needed for everything to work as intended +ESSENTIAL_TYPE_ALIAS_MAP: TypeAliasMap = { + # XXX: technically types.UnionType is not a type, so mypy complains, but for our purposes it is a type + Union: UnionType, # type: ignore[dict-item] +} + +# when used inside fields these must emit a warning, because an immutable variant is provided instead, if the mutable +# variant was provided the mutability would not be tracked +DEFAULT_TYPE_ALIAS_MAP: TypeAliasMap = { + **ESSENTIAL_TYPE_ALIAS_MAP, + OrderedDict: dict, + bytearray: bytes, + # deque: tuple, # I think this is too much + list: tuple, + set: frozenset, +} + +# Mapping between types and NCType classes. 
+FIELD_TYPE_TO_NC_TYPE_MAP: TypeToNCTypeMap = { + # builtin types: + bool: BoolNCType, + bytes: BytesNCType, + dict: DictNCType, + frozenset: FrozenSetNCType, + int: VarInt32NCType, + str: StrNCType, + tuple: TupleNCType, + # other Python types: + # XXX: ignored dict-item because Union is not considered a type, so mypy fails it, but it works for our case + Union: OptionalNCType, # type: ignore[dict-item] + UnionType: OptionalNCType, + NamedTuple: NamedTupleNCType, + # hathor types: + Address: AddressNCType, + Amount: VarUint32NCType, + BlueprintId: Bytes32NCType, + ContractId: Bytes32NCType, + Timestamp: Uint32NCType, + TokenUid: TokenUidNCType, + TxOutputScript: BytesLikeNCType[TxOutputScript], + VertexId: Bytes32NCType, + SignedData: SignedDataNCType, +} + +# This mapping includes all supported NCType classes, should only be used for parsing function calls +ARG_TYPE_TO_NC_TYPE_MAP: TypeToNCTypeMap = { + **FIELD_TYPE_TO_NC_TYPE_MAP, + # bultin types: + list: ListNCType, + set: SetNCType, + # other Python types: + deque: DequeNCType, + OrderedDict: DictNCType, +} + +RETURN_TYPE_TO_NC_TYPE_MAP: TypeToNCTypeMap = { + **ARG_TYPE_TO_NC_TYPE_MAP, + # XXX: ignored dict-item because technically None is not a type, type[None]/NoneType is + None: NullNCType, # type: ignore[dict-item] + NoneType: NullNCType, # this can come up here as well as None +} + + +_FIELD_TYPE_MAP = NCType.TypeMap(DEFAULT_TYPE_ALIAS_MAP, FIELD_TYPE_TO_NC_TYPE_MAP) + + +def make_nc_type_for_field_type(type_: type[T], /) -> NCType[T]: + """ Like NCType.from_type, but with maps for field annotations. + + If you need to customize the mapping use `NCType.from_type` instead. + """ + return NCType.from_type(type_, type_map=_FIELD_TYPE_MAP) + + +_ARG_TYPE_MAP = NCType.TypeMap(ESSENTIAL_TYPE_ALIAS_MAP, ARG_TYPE_TO_NC_TYPE_MAP) + + +def make_nc_type_for_arg_type(type_: type[T], /) -> NCType[T]: + """ Like NCType.from_type, but with maps for function arg annotations. 
+ + If you need to customize the mapping use `NCType.from_type` instead. + """ + return NCType.from_type(type_, type_map=_ARG_TYPE_MAP) + + +_RETURN_TYPE_MAP = NCType.TypeMap(ESSENTIAL_TYPE_ALIAS_MAP, RETURN_TYPE_TO_NC_TYPE_MAP) + + +def make_nc_type_for_return_type(type_: type[T], /) -> NCType[T]: + """ Like NCType.from_type, but with maps for function return annotations. + + If you need to customize the mapping use `NCType.from_type` instead. + """ + return NCType.from_type(type_, type_map=_RETURN_TYPE_MAP) diff --git a/hathor/nanocontracts/nc_types/address_nc_type.py b/hathor/nanocontracts/nc_types/address_nc_type.py new file mode 100644 index 000000000..e3f2e93c9 --- /dev/null +++ b/hathor/nanocontracts/nc_types/address_nc_type.py @@ -0,0 +1,67 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +from typing_extensions import Self, override + +from hathor.crypto.util import decode_address, get_address_b58_from_bytes +from hathor.nanocontracts.nc_types.nc_type import NCType +from hathor.nanocontracts.types import Address +from hathor.serialization import Deserializer, Serializer +from hathor.transaction.headers.nano_header import ADDRESS_LEN_BYTES +from hathor.utils.typing import is_subclass + + +class AddressNCType(NCType[Address]): + """ Represents `Address` values, which use a different JSON encoding than bytes. 
+ """ + _is_hashable = True + + @override + @classmethod + def _from_type(cls, type_: type[Address], /, *, type_map: NCType.TypeMap) -> Self: + if not is_subclass(type_, bytes): + raise TypeError('expected bytes-like type') + return cls() + + @override + def _check_value(self, value: Address, /, *, deep: bool) -> None: + if not isinstance(value, bytes): + raise TypeError('expected bytes type') + if len(value) != ADDRESS_LEN_BYTES: + raise ValueError(f'an address must always have {ADDRESS_LEN_BYTES} bytes') + + @override + def _serialize(self, serializer: Serializer, value: Address, /) -> None: + data = bytes(value) + assert len(data) == ADDRESS_LEN_BYTES # XXX: double check + serializer.write_bytes(data) + + @override + def _deserialize(self, deserializer: Deserializer, /) -> Address: + data = bytes(deserializer.read_bytes(ADDRESS_LEN_BYTES)) + return Address(data) + + @override + def _json_to_value(self, json_value: NCType.Json, /) -> Address: + if not isinstance(json_value, str): + raise ValueError('expected str') + # XXX: maybe decode_address could be migrated to hathor.serializers.encoding.b58_address + return Address(decode_address(json_value)) + + @override + def _value_to_json(self, value: Address, /) -> NCType.Json: + # XXX: maybe get_address_b58_from_bytes could be migrated to hathor.serializers.encoding.b58_address + return get_address_b58_from_bytes(value) diff --git a/hathor/nanocontracts/nc_types/bool_nc_type.py b/hathor/nanocontracts/nc_types/bool_nc_type.py new file mode 100644 index 000000000..960ba4b2d --- /dev/null +++ b/hathor/nanocontracts/nc_types/bool_nc_type.py @@ -0,0 +1,58 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +from typing_extensions import Self, override + +from hathor.nanocontracts.nc_types.nc_type import NCType +from hathor.serialization import Deserializer, Serializer +from hathor.serialization.encoding.bool import decode_bool, encode_bool + + +class BoolNCType(NCType[bool]): + """ Represents builtin `bool` values. + """ + + _is_hashable = True + + @override + @classmethod + def _from_type(cls, type_: type[bool], /, *, type_map: NCType.TypeMap) -> Self: + if type_ is not bool: + raise TypeError('expected bool type') + return cls() + + @override + def _check_value(self, value: bool, /, *, deep: bool) -> None: + if not isinstance(value, bool): + raise TypeError('expected boolean') + + @override + def _serialize(self, serializer: Serializer, value: bool, /) -> None: + encode_bool(serializer, value) + + @override + def _deserialize(self, deserializer: Deserializer, /) -> bool: + return decode_bool(deserializer) + + @override + def _json_to_value(self, json_value: NCType.Json, /) -> bool: + if not isinstance(json_value, bool): + raise ValueError('expected bool') + return json_value + + @override + def _value_to_json(self, value: bool, /) -> NCType.Json: + return value diff --git a/hathor/nanocontracts/nc_types/bytes_nc_type.py b/hathor/nanocontracts/nc_types/bytes_nc_type.py new file mode 100644 index 000000000..2358d8b03 --- /dev/null +++ b/hathor/nanocontracts/nc_types/bytes_nc_type.py @@ -0,0 +1,96 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use 
this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +from inspect import isclass +from typing import TypeVar + +from typing_extensions import Self, override + +from hathor.nanocontracts.nc_types.nc_type import NCType +from hathor.serialization import Deserializer, Serializer +from hathor.serialization.consts import DEFAULT_BYTES_MAX_LENGTH +from hathor.serialization.encoding.bytes import decode_bytes, encode_bytes +from hathor.utils.typing import is_subclass + +B = TypeVar('B', bound=bytes) + + +class BytesLikeNCType(NCType[B]): + """ Represents values from class that inherit/new-type `bytes`. 
+ """ + + __slots__ = ('_actual_type') + _is_hashable = True + _actual_type: type[B] + + def __init__(self, actual_type: type[B]) -> None: + self._actual_type = actual_type + + @override + @classmethod + def _from_type(cls, type_: type[B], /, *, type_map: NCType.TypeMap) -> Self: + if not is_subclass(type_, bytes): + raise TypeError('expected bytes-like type') + return cls(type_) + + @override + def _check_value(self, value: bytes, /, *, deep: bool) -> None: + if isclass(self._actual_type): + if not isinstance(value, self._actual_type): + raise TypeError('expected {self._actual_type} instance') + else: + if not isinstance(value, bytes): + raise TypeError('expected bytes instance') + + @override + def _serialize(self, serializer: Serializer, value: B, /) -> None: + data = bytes(value) + encode_bytes(serializer.with_max_bytes(DEFAULT_BYTES_MAX_LENGTH), data) + + @override + def _deserialize(self, deserializer: Deserializer, /) -> B: + data = decode_bytes(deserializer.with_max_bytes(DEFAULT_BYTES_MAX_LENGTH)) + return self._actual_type(data) + + @override + def _json_to_value(self, json_value: NCType.Json, /) -> B: + if not isinstance(json_value, str): + raise ValueError('expected str') + data = bytes.fromhex(json_value) + return self._actual_type(data) + + @override + def _value_to_json(self, value: B, /) -> NCType.Json: + data = bytes(value) + return data.hex() + + +class BytesNCType(BytesLikeNCType[bytes]): + """ Represents builtin `bytes` values. 
+ """ + __slots__ = () + _actual_type = bytes + + @override + def __init__(self) -> None: + pass + + @override + @classmethod + def _from_type(cls, type_: type[bytes], /, *, type_map: NCType.TypeMap) -> Self: + if type_ is not bytes: + raise TypeError('expected bytes type') + return cls() diff --git a/hathor/nanocontracts/nc_types/collection_nc_type.py b/hathor/nanocontracts/nc_types/collection_nc_type.py new file mode 100644 index 000000000..c7fa9cf00 --- /dev/null +++ b/hathor/nanocontracts/nc_types/collection_nc_type.py @@ -0,0 +1,157 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +from abc import ABC, abstractmethod +from collections import deque +from collections.abc import Collection, Hashable, Iterable, Set +from typing import TypeVar, get_args, get_origin + +from typing_extensions import Self, override + +from hathor.nanocontracts.nc_types.nc_type import NCType +from hathor.nanocontracts.nc_types.utils import is_origin_hashable +from hathor.serialization import Deserializer, Serializer +from hathor.serialization.compound_encoding.collection import decode_collection, encode_collection + +T = TypeVar('T') +H = TypeVar('H', bound=Hashable) + + +class _CollectionNCType(NCType[Collection[T]], ABC): + """ Used as base for NCType classes that represent collecions. 
+ """ + __slots__ = ('_item',) + + _is_hashable = False + _item: NCType[T] + + def __init__(self, item_nc_type: NCType[T], /) -> None: + self._item = item_nc_type + + @abstractmethod + def _build(self, items: Iterable[T]) -> Collection[T]: + """ How to build the concrete collection from an iterable of items. + """ + raise NotImplementedError + + @override + @classmethod + def _from_type(cls, type_: type[Collection[T]], /, *, type_map: NCType.TypeMap) -> Self: + member_type = cls._get_member_type(type_) + member_nc_type = NCType.from_type(member_type, type_map=type_map) + return cls(member_nc_type) + + @classmethod + def _get_member_type(cls, type_: type[Collection[T]]) -> type[T]: + origin_type: type = get_origin(type_) or type_ + if not issubclass(origin_type, Collection): + raise TypeError('expected Collection type') + args = get_args(type_) + if not args or len(args) != 1: + raise TypeError(f'expected {type_.__name__}[]') + return args[0] + + def _check_item(self, item: T) -> None: + self._item._check_value(item, deep=True) + + @override + def _check_value(self, value: Collection[T], /, *, deep: bool) -> None: + if not isinstance(value, Collection): + raise TypeError('expected Collection type') + if deep: + for i in value: + self._check_item(i) + + @override + def _serialize(self, serializer: Serializer, value: Collection[T], /) -> None: + encode_collection(serializer, value, self._item.serialize) + + @override + def _deserialize(self, deserializer: Deserializer, /) -> Collection[T]: + return decode_collection( + deserializer, + self._item.deserialize, + self._build, + ) + + @override + def _json_to_value(self, json_value: NCType.Json, /) -> Collection[T]: + if not isinstance(json_value, list): + raise ValueError('expected list') + return self._build(self._item.json_to_value(i) for i in json_value) + + @override + def _value_to_json(self, value: Collection[T], /) -> NCType.Json: + return [self._item.value_to_json(i) for i in value] + + +class 
ListNCType(_CollectionNCType[T]): + """ Represents builtin `list` values. + """ + + @override + def _build(self, items: Iterable[T]) -> list[T]: + return list(items) + + +class DequeNCType(_CollectionNCType[T]): + """ Represents builtin `collections.deque` values. + """ + + @override + def _build(self, items: Iterable[T]) -> deque[T]: + return deque(items) + + +class SetNCType(_CollectionNCType[H]): + """ Represents builtin `set` values. + """ + + @override + def _build(self, items: Iterable[H]) -> Set[H]: + return set(items) + + @override + @classmethod + def _get_member_type(cls, type_: type[Collection[T]]) -> type[T]: + origin_type: type = get_origin(type_) or type_ + if not issubclass(origin_type, Set): + raise TypeError('expected Set type') + args = get_args(type_) + if not args or len(args) != 1: + raise TypeError(f'expected {type_.__name__}[]') + member_type, = args + if not is_origin_hashable(args[0]): + raise TypeError(f'{args[0]} is not hashable') + return member_type + + @override + def _check_item(self, item: H) -> None: + if not isinstance(item, Hashable): + raise TypeError('expected Hashable type') + super()._check_item(item) + + +class FrozenSetNCType(SetNCType[H]): + """ Represents builtin `frozenset` values. + """ + + # XXX: SetNCType already enforces H to be hashable, but is not itself hashable, a frozenset, however, is hashable + _is_hashable = True + + @override + def _build(self, items: Iterable[H]) -> frozenset[H]: + return frozenset(items) diff --git a/hathor/nanocontracts/nc_types/dataclass_nc_type.py b/hathor/nanocontracts/nc_types/dataclass_nc_type.py new file mode 100644 index 000000000..e460974e1 --- /dev/null +++ b/hathor/nanocontracts/nc_types/dataclass_nc_type.py @@ -0,0 +1,107 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +This NCType class is not meant for use in a TypeMap, it is meant to facilitate creating NCType classes for simple +dataclasses for easily making them accessible to NCStorage. + +In theory it could be generalized for use in the future but we have to be careful with supporting types defined inside +and OCB and the mapping logic will need to be adapted or special cased to support this. +""" + +from __future__ import annotations + +from dataclasses import fields, is_dataclass +from typing import TYPE_CHECKING, Any, TypeVar + +from typing_extensions import Self, override + +from hathor.nanocontracts.nc_types.nc_type import NCType +from hathor.nanocontracts.nc_types.optional_nc_type import OptionalNCType +from hathor.serialization import Deserializer, Serializer + +if TYPE_CHECKING: + from _typeshed import DataclassInstance + +D = TypeVar('D', bound='DataclassInstance') + + +def make_dataclass_nc_type(class_: type[D]) -> DataclassNCType[D]: + """ Helper function to build a NCType for the given dataclass. + """ + from hathor.nanocontracts.nc_types import DEFAULT_TYPE_ALIAS_MAP, RETURN_TYPE_TO_NC_TYPE_MAP + type_map = NCType.TypeMap(DEFAULT_TYPE_ALIAS_MAP, RETURN_TYPE_TO_NC_TYPE_MAP) + return DataclassNCType._from_type(class_, type_map=type_map) + + +def make_dataclass_opt_nc_type(class_: type[D]) -> OptionalNCType[D]: + """ Helper function to build an OptionalNCType for the given dataclass. 
+ """ + return OptionalNCType(make_dataclass_nc_type(class_)) + + +class DataclassNCType(NCType[D]): + __slots__ = ('_fields', '_class') + _is_hashable = False # it might be possible to calculate _is_hashable, but we don't need it + _fields: dict[str, NCType] + _class: type[D] + + def __init__(self, fields_: dict[str, NCType], class_: type[D]): + self._fields = fields_ + self._class = class_ + + @override + @classmethod + def _from_type(cls, type_: type[D], /, *, type_map: NCType.TypeMap) -> Self: + if not is_dataclass(type_): + raise TypeError('expected a dataclass') + # XXX: the order is important, but `dict` and `fields` should have a stable order + values: dict[str, NCType] = {} + for field in fields(type_): + values[field.name] = NCType.from_type(field.type, type_map=type_map) + # XXX: ignore arg-type because after using is_dataclass(type_) mypy gets confused about type_'s type + return cls(values, type_) # type: ignore[arg-type] + + @override + def _check_value(self, value: D, /, *, deep: bool) -> None: + if not isinstance(value, self._class): + raise TypeError(f'expected {self._class} instance') + + @override + def _serialize(self, serializer: Serializer, value: D, /) -> None: + for field_name, field_nc_type in self._fields.items(): + field_nc_type.serialize(serializer, getattr(value, field_name)) + + @override + def _deserialize(self, deserializer: Deserializer, /) -> D: + kwargs: dict[str, Any] = {} + for field_name, field_nc_type in self._fields.items(): + kwargs[field_name] = field_nc_type.deserialize(deserializer) + return self._class(**kwargs) + + @override + def _json_to_value(self, json_value: NCType.Json, /) -> D: + if not isinstance(json_value, dict): + raise ValueError('expected dict') + kwargs: dict[str, Any] = {} + for field_name, field_nc_type in self._fields.items(): + kwargs[field_name] = field_nc_type.json_to_value(json_value[field_name]) + return self._class(*kwargs) + + @override + def _value_to_json(self, value: D) -> NCType.Json: + 
return { + field_name: field_nc_type.value_to_json(getattr(value, field_name)) + for field_name, field_nc_type in self._fields.items() + } diff --git a/hathor/nanocontracts/nc_types/fixed_size_bytes_nc_type.py b/hathor/nanocontracts/nc_types/fixed_size_bytes_nc_type.py new file mode 100644 index 000000000..63ffc7949 --- /dev/null +++ b/hathor/nanocontracts/nc_types/fixed_size_bytes_nc_type.py @@ -0,0 +1,84 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +from typing import ClassVar, TypeVar + +from typing_extensions import Self, override + +from hathor.nanocontracts.nc_types.nc_type import NCType +from hathor.serialization import Deserializer, Serializer +from hathor.utils.typing import is_subclass + +B = TypeVar('B', bound=bytes) + + +class _FixedSizeBytesNCType(NCType[B]): + _is_hashable = True + _size: ClassVar[int] + _actual_type: type[B] + + def __init__(self, actual_type: type[B]) -> None: + self._actual_type = actual_type + + @override + @classmethod + def _from_type(cls, type_: type[B], /, *, type_map: NCType.TypeMap) -> Self: + if not is_subclass(type_, bytes): + raise TypeError('expected bytes-like type') + return cls(type_) + + def _filter_in(self, value: B, /) -> bytes: + """Mechanism to convert B into bytes before serializing.""" + return bytes(value) + + def _filter_out(self, data: bytes, /) -> B: + """Mechanism to convert bytes into B after deserializing.""" + return 
self._actual_type(data) + + @override + def _check_value(self, value: B, /, *, deep: bool) -> None: + if not isinstance(value, bytes): + raise TypeError(f'expected bytes type, not {type(value)}') + data = self._filter_in(value) + if len(data) != self._size: + raise TypeError( + f'value has {len(value)} bytes, expected ' + f'{self._actual_type.__name__} to always have {self._size} bytes' + ) + + @override + def _serialize(self, serializer: Serializer, value: B, /) -> None: + data = bytes(value) + assert len(data) == self._size # XXX: double check + serializer.write_bytes(data) + + @override + def _deserialize(self, deserializer: Deserializer, /) -> B: + return self._filter_out(bytes(deserializer.read_bytes(self._size))) + + @override + def _json_to_value(self, json_value: NCType.Json, /) -> B: + if not isinstance(json_value, str): + raise ValueError('expected str') + return self._filter_out(bytes.fromhex(json_value)) + + @override + def _value_to_json(self, value: bytes, /) -> NCType.Json: + return value.hex() + + +class Bytes32NCType(_FixedSizeBytesNCType[B]): + _size = 32 diff --git a/hathor/nanocontracts/nc_types/map_nc_type.py b/hathor/nanocontracts/nc_types/map_nc_type.py new file mode 100644 index 000000000..ad2c52359 --- /dev/null +++ b/hathor/nanocontracts/nc_types/map_nc_type.py @@ -0,0 +1,107 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import annotations + +from abc import ABC, abstractmethod +from collections.abc import Hashable, Mapping +from typing import Iterable, TypeVar, get_args, get_origin + +from typing_extensions import Self, override + +from hathor.nanocontracts.nc_types.nc_type import NCType +from hathor.nanocontracts.nc_types.utils import is_origin_hashable +from hathor.serialization import Deserializer, Serializer +from hathor.serialization.compound_encoding.mapping import decode_mapping, encode_mapping + +T = TypeVar('T') +H = TypeVar('H', bound=Hashable) + + +class _MapNCType(NCType[Mapping[H, T]], ABC): + """ Base class to help implement NCType for mappings. + """ + + __slots__ = ('_key', '_value') + + _key: NCType[H] + _value: NCType[T] + _is_hashable = False + + def __init__(self, key: NCType[H], value: NCType[T]) -> None: + self._key = key + self._value = value + + @abstractmethod + def _build(self, items: Iterable[tuple[H, T]]) -> Mapping[H, T]: + """ How to build the concrete map from an iterable of (key, value). 
+ """ + raise NotImplementedError + + @override + @classmethod + def _from_type(cls, type_: type[Mapping[H, T]], /, *, type_map: NCType.TypeMap) -> Self: + origin_type: type = get_origin(type_) or type_ + if not issubclass(origin_type, Mapping): + raise TypeError('expected Mapping type') + args = get_args(type_) + if not args or len(args) != 2: + raise TypeError(f'expected {type_.__name__}[, ]') + key_type, value_type = args + if not is_origin_hashable(key_type): + raise TypeError(f'{key_type} is not hashable') + key_nc_type = NCType.from_type(key_type, type_map=type_map) + assert key_nc_type.is_hashable(), 'hashable "types" must produce hashable "values"' + return cls(key_nc_type, NCType.from_type(value_type, type_map=type_map)) + + @override + def _check_value(self, value: Mapping[H, T], /, *, deep: bool) -> None: + if not isinstance(value, Mapping): + raise TypeError('expected Mapping type') + if deep: + for k, v in value.items(): + self._key._check_value(k, deep=True) + self._value._check_value(v, deep=True) + + @override + def _serialize(self, serializer: Serializer, value: Mapping[H, T], /) -> None: + encode_mapping(serializer, value, self._key.serialize, self._value.serialize) + + @override + def _deserialize(self, deserializer: Deserializer, /) -> Mapping[H, T]: + return decode_mapping( + deserializer, + self._key.deserialize, + self._value.deserialize, + self._build, + ) + + @override + def _json_to_value(self, json_value: NCType.Json, /) -> Mapping[H, T]: + if not isinstance(json_value, dict): + raise ValueError('expected dict') + return self._build((self._key.json_to_value(k), self._value.json_to_value(v)) for k, v in json_value.items()) + + @override + def _value_to_json(self, value: Mapping[H, T], /) -> NCType.Json: + return {self._key.value_to_json(k): self._value.value_to_json(v) for k, v in value.items()} + + +class DictNCType(_MapNCType): + """ Represents builtin `dict` values. 
+ """ + + @override + def _build(self, items: Iterable[tuple[H, T]]) -> dict[H, T]: + return dict(items) diff --git a/hathor/nanocontracts/nc_types/namedtuple_nc_type.py b/hathor/nanocontracts/nc_types/namedtuple_nc_type.py new file mode 100644 index 000000000..ff82f2b14 --- /dev/null +++ b/hathor/nanocontracts/nc_types/namedtuple_nc_type.py @@ -0,0 +1,78 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +from collections.abc import Iterable +from typing import NamedTuple, TypeVar + +from typing_extensions import Self, override + +from hathor.nanocontracts.nc_types.nc_type import NCType +from hathor.serialization import Deserializer, Serializer + +N = TypeVar('N', bound=tuple) + + +# XXX: we can't usefully describe the tuple type +class NamedTupleNCType(NCType[N]): + __slots__ = ('_is_hashable', '_args', '_actual_type') + + # we can't even parametrize NCType, lists are allowed in tuples and it's still hashable it just fails in runtime + _args: tuple[NCType, ...] 
+ _actual_type: type[N] + + def __init__(self, namedtuple: type[N], args: Iterable[NCType]) -> None: + self._actual_type = namedtuple + self._args = tuple(args) + self._is_hashable = all(arg_nc_type.is_hashable() for arg_nc_type in self._args) + + @override + @classmethod + def _from_type(cls, type_: type[N], /, *, type_map: NCType.TypeMap) -> Self: + if not issubclass(type_, tuple) and NamedTuple not in getattr(cls, '__orig_bases__', tuple()): + raise TypeError('expected NamedTuple type') + args = [type_.__annotations__[field_name] for field_name in type_._fields] # type: ignore[attr-defined] + return cls(type_, (NCType.from_type(arg, type_map=type_map) for arg in args)) + + @override + def _check_value(self, value: N, /, *, deep: bool) -> None: + if not isinstance(value, (tuple, self._actual_type)): + raise TypeError('expected tuple or namedtuple') + # TODO: support default values + if len(value) != len(self._args): + raise TypeError('wrong number of arguments') + if deep: + for i, arg_nc_type in zip(value, self._args): + arg_nc_type._check_value(i, deep=True) + + @override + def _serialize(self, serializer: Serializer, value: N, /) -> None: + from hathor.serialization.compound_encoding.tuple import encode_tuple + encode_tuple(serializer, value, tuple(i.serialize for i in self._args)) + + @override + def _deserialize(self, deserializer: Deserializer, /) -> N: + from hathor.serialization.compound_encoding.tuple import decode_tuple + return self._actual_type(*decode_tuple(deserializer, tuple(i.deserialize for i in self._args))) + + @override + def _json_to_value(self, json_value: NCType.Json, /) -> N: + if not isinstance(json_value, list): + raise ValueError('expected list') + return self._actual_type(*tuple(v.json_to_value(i) for (i, v) in zip(json_value, self._args))) + + @override + def _value_to_json(self, value: N) -> NCType.Json: + return [v.value_to_json(i) for (i, v) in zip(value, self._args)] diff --git a/hathor/nanocontracts/nc_types/nc_type.py 
b/hathor/nanocontracts/nc_types/nc_type.py new file mode 100644 index 000000000..432ee451e --- /dev/null +++ b/hathor/nanocontracts/nc_types/nc_type.py @@ -0,0 +1,201 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +from abc import ABC, abstractmethod +from typing import Generic, NamedTuple, TypeAlias, TypeVar, final + +from typing_extensions import Self + +from hathor.nanocontracts.nc_types.utils import TypeAliasMap, TypeToNCTypeMap, get_aliased_type, get_usable_origin_type +from hathor.serialization import Deserializer, Serializer + +T = TypeVar('T') + + +class NCType(ABC, Generic[T]): + """ This class is used to model a type with a known type signature and how it will be (de)serialized. + + It's used for modeling the serialization of NC method calls (the method signature defines the NCType), and also + used for modeling a the values that can go in immutable NC properties, and as key/value/members of mutable NC + properties (NC properties are modeled with the Field class, most of which make use of NCType classes). + + Instances of this class are not visible to blueprints, so don't need strong protections against private properties. 
+ """ + + # These are all the values that can be observed when parsing a JSON with the builtin json module + # See: https://docs.python.org/3/library/json.html#encoders-and-decoders + # It is a shortcut to allow methods to talk about values that can be used with the json module + Json: TypeAlias = dict | list | str | int | float | bool | None + + class TypeMap(NamedTuple): + alias_map: TypeAliasMap + nc_types_map: TypeToNCTypeMap + + # XXX: subclasses must override this if they need any properties + __slots__ = () + + # XXX: subclasses must initialize this property + _is_hashable: bool + + @final + @staticmethod + def from_type(type_: type[T], /, *, type_map: TypeMap) -> NCType[T]: + """ Instantiate a NCType instance from a type signature using the given maps. + + A `type_nc_type_map` associates concrete types to concrete NCType classes, while a `type_alias_map` associate + types with substitute types to use instead. + """ + usable_origin = get_usable_origin_type(type_, type_map=type_map) + nc_type = type_map.nc_types_map[usable_origin] + # XXX: first we try to create the nc_type without making an alias, this ensures that an invalid annotation + # would not be accepted + _ = nc_type._from_type(type_, type_map=type_map) + # XXX: then we create the actual nc_type with type-alias + aliased_type = get_aliased_type(type_, type_map.alias_map) + return nc_type._from_type(aliased_type, type_map=type_map) + + @classmethod + def _from_type(cls, type_: type[T], /, *, type_map: TypeMap) -> Self: + """ Instantiate a NCType instance from a type signature. + + The implementation is expected to inspect the given type's origin and args to check for compatibility and to + decide on using `NCType.from_type`, forwarding the given `type_map` to continue instantiating NCType + specializations, this is the case particularly for compount NCTypes, like OptionalNCType or DictNCType. 
+ """ + # XXX: a NCType that is only meant for local use does not need to implement _from_type + raise TypeError(f'{cls} is not compatible with use in a NCType.TypeMap') + + @final + def is_hashable(self) -> bool: + """ Indicates whether the type being abstracted over is expected to be hashable. + + This is used to help maintain prevent unhashable types from being used as keys in dicts or members in sets.""" + return self._is_hashable + + @final + def check_value(self, value: T, /) -> None: + """ Implementation should raise a TypeError if the value's type is not compatible. + + If `deep=True` then the check should recurse for compound types (like lists/maps) to check each value. It is + expected that `deep=False` is used in a context where the recursion would be made externally, so to avoid + checking the same value multiple times `deep=False` is used. + + A value being compatible is more than just having the correct instance, for example if the value is a dict, all + the dict's keys and values must be checked for compatibility. + """ + # XXX: subclasses must implement NCType._check_value, not NCType.check_value + self._check_value(value, deep=True) + + @final + def serialize(self, serializer: Serializer, value: T, /) -> None: + """ Serialize a value instance according to the signature that was abstracted. + + Serialization includes calling check_value while the value is being serialized, so calling check_value before + calling serialize is not needed. + """ + # XXX: subclasses must implement NCType._serialize, not NCType.serialize + self._check_value(value, deep=False) + self._serialize(serializer, value) + + @final + def deserialize(self, deserializer: Deserializer, /) -> T: + """ Deserialize a value instance according to the signature that was abstracted. + + Deserialization includes asserting check_value while the value is being deserialized, so calling check_value + after calling deserialize is not needed. 
Moreover, deserialization is already expected to produce valid values, + so checking is only made as a double check and results in AssertionError (no TypeError). + """ + # XXX: subclasses must implement NCType._deserialize, not NCType.deserialize + value = self._deserialize(deserializer) + self._check_value(value, deep=False) + return value + + @final + def to_bytes(self, value: T, /) -> bytes: + """ Shortcut to quickly convert a value T to `bytes` and avoid using the serialization system. + """ + serializer = Serializer.build_bytes_serializer() + self.serialize(serializer, value) + return bytes(serializer.finalize()) + + @final + def from_bytes(self, data: bytes, /) -> T: + """ Shortcut to quickly parse a value T from `bytes` and avoid using the serialization system. + """ + deserializer = Deserializer.build_bytes_deserializer(data) + value = self.deserialize(deserializer) + deserializer.finalize() + return value + + @final + def json_to_value(self, json_value: Json, /) -> T: + """ Use this to convert a value that comes out from `json.load` into the value that this class expects. + + Will raise a ValueError if the given `json_value` is not compatible. + """ + # XXX: subclasses must implement NCType._json_to_value, not NCType.json_to_value + value = self._json_to_value(json_value) + self._check_value(value, deep=False) + return value + + @final + def value_to_json(self, value: T, /) -> Json: + """ Use this to convert a value to an object compatible with `json.dump`. + + Will raise a ValueError if the given `value` is not compatible. + """ + # XXX: subclasses must implement NCType._value_to_json, not NCType.value_to_json + self._check_value(value, deep=False) + json_value = self._value_to_json(value) + return json_value + + @abstractmethod + def _check_value(self, value: T, /, *, deep: bool) -> None: + """ Inner implementation of `NCType.check_value`, should return True is the given value is valid. 
+ + Compound values should use `NCType._check_value` on the inner type(s) instead of `NCType.check_value` and pass + the appropriate deep argument. + """ + raise NotImplementedError + + @abstractmethod + def _serialize(self, serializer: Serializer, value: T, /) -> None: + """ Inner implementation of `serialize`, you can assume that the give value has been "shallow checked". + + When implementing the serialization with compound encoders, `NCType.serialize` should be passed as an `Encoder` + instead of `NCType._serialize`, by passing `NCType.serialize` the next `Vallue._serialize` implementation will + be able to assume that the value was checked. + """ + raise NotImplementedError + + @abstractmethod + def _deserialize(self, deserializer: Deserializer, /) -> T: + """ Inner implementation of `deserialize`, it is expected that deserializers always produce valid values. + + Even then, `NCType.deserialize` should be passed as a `Decoder`, that way it's possible to do a "shallow check" + for asserting that a valid value was produced. + """ + raise NotImplementedError + + # this are optional to implement, but provide the ability to convert to/from JSON + + def _json_to_value(self, json: Json, /) -> T: + """ Inner implementation of `NCType.json_to_value`.""" + raise ValueError('this class does not support JSON conversion') + + def _value_to_json(self, value: T, /) -> Json: + """ Inner implementation of `NCType.value_to_json`.""" + raise ValueError('this class does not support JSON conversion') diff --git a/hathor/nanocontracts/nc_types/null_nc_type.py b/hathor/nanocontracts/nc_types/null_nc_type.py new file mode 100644 index 000000000..26900e0fe --- /dev/null +++ b/hathor/nanocontracts/nc_types/null_nc_type.py @@ -0,0 +1,59 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +from types import NoneType + +from typing_extensions import Self, override + +from hathor.nanocontracts.nc_types.nc_type import NCType +from hathor.serialization import Deserializer, Serializer + + +class NullNCType(NCType[None]): + _is_hashable = True + + @override + @classmethod + def _from_type(cls, type_: type[None], /, *, type_map: NCType.TypeMap) -> Self: + # XXX: usually we expect NoneType as type_, but in some cases it can come-in as None, and we take that too + if type_ is None or type_ is NoneType: + return cls() + raise TypeError('expected None type') + + @override + def _check_value(self, value: None, /, *, deep: bool) -> None: + if value is not None: + raise TypeError('expected None') + + @override + def _serialize(self, serializer: Serializer, value: None, /) -> None: + # XXX: zero sized serialization, nothing to do + pass + + @override + def _deserialize(self, deserializer: Deserializer, /) -> None: + # XXX: zero sized serialization, nothing to do + pass + + @override + def _json_to_value(self, json_value: NCType.Json, /) -> None: + if json_value is not None: + raise ValueError('expected None/null') + return None + + @override + def _value_to_json(self, value: None, /) -> NCType.Json: + return None diff --git a/hathor/nanocontracts/nc_types/optional_nc_type.py b/hathor/nanocontracts/nc_types/optional_nc_type.py new file mode 100644 index 000000000..f90cf3522 --- /dev/null +++ b/hathor/nanocontracts/nc_types/optional_nc_type.py @@ -0,0 +1,82 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the 
Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +from types import NoneType, UnionType +# XXX: ignore attr-defined because mypy doesn't recognize it, even though all version of python that we support; have +# this defined, even if it's an internal class +from typing import TypeVar, _UnionGenericAlias as UnionGenericAlias, get_args # type: ignore[attr-defined] + +from typing_extensions import Self, override + +from hathor.nanocontracts.nc_types.nc_type import NCType +from hathor.serialization import Deserializer, Serializer +from hathor.serialization.compound_encoding.optional import decode_optional, encode_optional + +V = TypeVar('V') + + +class OptionalNCType(NCType[V | None]): + """ Represents a nc_type that is either `V` or `None`. 
+ """ + + __slots__ = ('_is_hashable', '_value') + + _value: NCType[V] + + def __init__(self, nc_type: NCType[V]) -> None: + self._value = nc_type + self._is_hashable = nc_type.is_hashable() + + @override + @classmethod + def _from_type(cls, type_: type[V | None], /, *, type_map: NCType.TypeMap) -> Self: + if not isinstance(type_, (UnionType, UnionGenericAlias)): + raise TypeError('expected type union') + args = get_args(type_) + assert args, 'union always has args' + if len(args) != 2 or NoneType not in args: + raise TypeError('type must be either `None | T` or `T | None`') + not_none_type, = tuple(set(args) - {NoneType}) # get the type that is not None + return cls(NCType.from_type(not_none_type, type_map=type_map)) + + @override + def _check_value(self, value: V | None, /, *, deep: bool) -> None: + if value is None: + return + if deep: + self._value._check_value(value, deep=True) + + @override + def _serialize(self, serializer: Serializer, value: V | None, /) -> None: + encode_optional(serializer, value, self._value.serialize) + + @override + def _deserialize(self, deserializer: Deserializer, /) -> V | None: + return decode_optional(deserializer, self._value.deserialize) + + @override + def _json_to_value(self, json_value: NCType.Json, /) -> V | None: + if json_value is None: + return None + else: + return self._value.json_to_value(json_value) + + @override + def _value_to_json(self, value: V | None, /) -> NCType.Json: + if value is None: + return None + else: + return self._value.value_to_json(value) diff --git a/hathor/nanocontracts/nc_types/signed_data_nc_type.py b/hathor/nanocontracts/nc_types/signed_data_nc_type.py new file mode 100644 index 000000000..4ecd9839e --- /dev/null +++ b/hathor/nanocontracts/nc_types/signed_data_nc_type.py @@ -0,0 +1,90 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +from typing import TypeVar + +from typing_extensions import Self, override + +from hathor.nanocontracts.nc_types.nc_type import NCType +from hathor.nanocontracts.types import SignedData +from hathor.serialization import Deserializer, Serializer +from hathor.serialization.compound_encoding.signed_data import decode_signed_data, encode_signed_data +from hathor.utils.typing import get_args, get_origin + +V = TypeVar('V', bound=NCType) + + +class SignedDataNCType(NCType[SignedData[V]]): + """ Represents a SignedData[*] values. + """ + __slots__ = ('_is_hashable', '_value', '_inner_type') + + _value: NCType[V] + _inner_type: type[V] + + def __init__(self, inner_nc_type: NCType[V], inner_type: type[V], /) -> None: + self._value = inner_nc_type + self._is_hashable = inner_nc_type.is_hashable() + self._inner_type = inner_type + + @override + @classmethod + def _from_type(cls, type_: type[SignedData[V]], /, *, type_map: NCType.TypeMap) -> Self: + origin_type = get_origin(type_) or type_ + if not issubclass(origin_type, SignedData): + raise TypeError('expected SignedData type') + args: tuple[type, ...] 
= get_args(type_) or tuple() + if len(args) != 1: + raise TypeError('expected one type argument') + inner_type, = args + return cls(NCType.from_type(inner_type, type_map=type_map), inner_type) + + @override + def _check_value(self, value: SignedData[V], /, *, deep: bool) -> None: + if not isinstance(value, SignedData): + raise TypeError('expected SignedData') + if deep: + self._value._check_value(value.data, deep=True) + + @override + def _serialize(self, serializer: Serializer, value: SignedData[V], /) -> None: + encode_signed_data(serializer, value, self._value.serialize) + + @override + def _deserialize(self, deserializer: Deserializer, /) -> SignedData[V]: + return decode_signed_data(deserializer, self._value.deserialize, self._inner_type) + + @override + def _json_to_value(self, json_value: NCType.Json, /) -> SignedData[V]: + if not isinstance(json_value, list): + raise ValueError('expected list') + if len(json_value) != 2: + raise ValueError('expected list of 2 elements') + inner_json_value, signature_json_value = json_value + data = self._value.json_to_value(inner_json_value) + if not isinstance(signature_json_value, str): + raise ValueError('expected str for signature') + script_input = bytes.fromhex(signature_json_value) + # XXX: ignore named-defined because mypy doesn't recognize self._inner_type + # NOTE: strangely enough it gives a name-defined error but in some nearly identical situations it gives a + # valid-type error + return SignedData[self._inner_type](data, script_input) # type: ignore[name-defined] + + @override + def _value_to_json(self, value: SignedData[V], /) -> NCType.Json: + inner_json_value = self._value.value_to_json(value.data) + signature_json_value = value.script_input.hex() + return [inner_json_value, signature_json_value] diff --git a/hathor/nanocontracts/nc_types/sized_int_nc_type.py b/hathor/nanocontracts/nc_types/sized_int_nc_type.py new file mode 100644 index 000000000..d19812e5d --- /dev/null +++ 
b/hathor/nanocontracts/nc_types/sized_int_nc_type.py @@ -0,0 +1,103 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +from typing import ClassVar + +from typing_extensions import Self, override + +from hathor.nanocontracts.nc_types.nc_type import NCType +from hathor.serialization import Deserializer, Serializer +from hathor.serialization.encoding.int import decode_int, encode_int +from hathor.utils.typing import is_subclass + + +class _SizedIntNCType(NCType[int]): + """ Base class for classes that represent builtin `int` values with a fixed size and signedness. 
+ """ + + _is_hashable = True + # XXX: subclass must define these values: + _signed: ClassVar[bool] + _byte_size: ClassVar[int] + + @classmethod + def _upper_bound_value(self) -> int | None: + if self._byte_size is None: + return None + if self._signed: + return 2**(self._byte_size * 8 - 1) - 1 + else: + return 2**(self._byte_size * 8) - 1 + + @classmethod + def _lower_bound_value(self) -> int | None: + if self._byte_size is None: + return None + if self._signed: + return -(2**(self._byte_size * 8 - 1)) + else: + return 0 + + @override + @classmethod + def _from_type(cls, type_: type[int], /, *, type_map: NCType.TypeMap) -> Self: + if not is_subclass(type_, int): + raise TypeError('expected int type') + return cls() + + @override + def _check_value(self, value: int, /, *, deep: bool) -> None: + if not isinstance(value, int): + raise TypeError('expected integer') + self._check_range(value) + + def _check_range(self, value: int) -> None: + upper_bound = self._upper_bound_value() + lower_bound = self._lower_bound_value() + if upper_bound is not None and value > upper_bound: + raise ValueError('above upper bound') + if lower_bound is not None and value < lower_bound: + raise ValueError('below lower bound') + + @override + def _serialize(self, serializer: Serializer, value: int, /) -> None: + encode_int(serializer, value, length=self._byte_size, signed=self._signed) + + @override + def _deserialize(self, deserializer: Deserializer, /) -> int: + return decode_int(deserializer, length=self._byte_size, signed=self._signed) + + @override + def _json_to_value(self, json_value: NCType.Json, /) -> int: + # XXX: should we support str? + if not isinstance(json_value, int): + raise ValueError('expected int') + return json_value + + @override + def _value_to_json(self, value: int, /) -> NCType.Json: + # XXX: should we support str? 
+ return value + + +class Int32NCType(_SizedIntNCType): + _signed = True + _byte_size = 4 # 4-bytes -> 32-bits + + +class Uint32NCType(_SizedIntNCType): + _signed = False + _byte_size = 4 # 4-bytes -> 32-bits diff --git a/hathor/nanocontracts/nc_types/str_nc_type.py b/hathor/nanocontracts/nc_types/str_nc_type.py new file mode 100644 index 000000000..e32781df9 --- /dev/null +++ b/hathor/nanocontracts/nc_types/str_nc_type.py @@ -0,0 +1,59 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +from typing_extensions import Self, override + +from hathor.nanocontracts.nc_types.nc_type import NCType +from hathor.serialization import Deserializer, Serializer +from hathor.serialization.consts import DEFAULT_BYTES_MAX_LENGTH +from hathor.serialization.encoding.utf8 import decode_utf8, encode_utf8 + + +class StrNCType(NCType[str]): + """ Represents builtin `str` values. 
+ """ + + _is_hashable = True + + @override + @classmethod + def _from_type(cls, type_: type[str], /, *, type_map: NCType.TypeMap) -> Self: + if type_ is not str: + raise TypeError('expected str type') + return cls() + + @override + def _check_value(self, value: str, /, *, deep: bool) -> None: + if not isinstance(value, str): + raise TypeError('excpected str type') + + @override + def _serialize(self, serializer: Serializer, value: str, /) -> None: + encode_utf8(serializer.with_max_bytes(DEFAULT_BYTES_MAX_LENGTH), value) + + @override + def _deserialize(self, deserializer: Deserializer, /) -> str: + return decode_utf8(deserializer.with_max_bytes(DEFAULT_BYTES_MAX_LENGTH)) + + @override + def _json_to_value(self, json_value: NCType.Json, /) -> str: + if not isinstance(json_value, str): + raise ValueError('expected str') + return json_value + + @override + def _value_to_json(self, value: str, /) -> NCType.Json: + return value diff --git a/hathor/nanocontracts/nc_types/token_uid_nc_type.py b/hathor/nanocontracts/nc_types/token_uid_nc_type.py new file mode 100644 index 000000000..e7b3185ea --- /dev/null +++ b/hathor/nanocontracts/nc_types/token_uid_nc_type.py @@ -0,0 +1,88 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import annotations + +from typing_extensions import Self, override + +from hathor.conf.settings import HATHOR_TOKEN_UID +from hathor.nanocontracts.nc_types.fixed_size_bytes_nc_type import Bytes32NCType +from hathor.nanocontracts.nc_types.nc_type import NCType +from hathor.nanocontracts.types import TokenUid +from hathor.serialization import Deserializer, Serializer +from hathor.serialization.compound_encoding.optional import decode_optional, encode_optional +from hathor.utils.typing import is_subclass + +TOKEN_SIZE = 32 +HATHOR_TOKEN_HEX = HATHOR_TOKEN_UID.hex() + + +class TokenUidNCType(NCType[TokenUid]): + _is_hashable = True + + def __init__(self) -> None: + self._bytes32_nc_type = Bytes32NCType(bytes) + + @override + @classmethod + def _from_type(cls, type_: type[TokenUid], /, *, type_map: NCType.TypeMap) -> Self: + # XXX: TokenUid is a NewType it cannot be used to make this check, when we have a custom class it will be + # possible to use it here instead of bytes + if not is_subclass(type_, bytes): + raise TypeError('expected bytes type') + return cls() + + @override + def _check_value(self, value: TokenUid, /, *, deep: bool) -> None: + if not isinstance(value, bytes): + raise TypeError('expected bytes instance') + data = bytes(value) + if data == HATHOR_TOKEN_UID: + return + elif len(data) != TOKEN_SIZE: + raise TypeError( + f'value has {len(value)} bytes, expected ' + f'TokenUid to always have {TOKEN_SIZE} bytes' + ) + + @override + def _serialize(self, serializer: Serializer, value: TokenUid, /) -> None: + # TokenUid is mapped to bytes | None, None represents the native token + raw_value: bytes | None = None if value == HATHOR_TOKEN_UID else value + encode_optional(serializer, raw_value, self._bytes32_nc_type.serialize) + + @override + def _deserialize(self, deserializer: Deserializer, /) -> TokenUid: + # bytes | None is mapped back to TokenUid, None represents the native token + raw_value = decode_optional(deserializer, 
self._bytes32_nc_type.deserialize) + value = HATHOR_TOKEN_UID if raw_value is None else raw_value + return TokenUid(value) + + @override + def _json_to_value(self, json_value: NCType.Json, /) -> TokenUid: + if not isinstance(json_value, str): + raise ValueError('expected str') + if json_value == HATHOR_TOKEN_HEX: + return TokenUid(HATHOR_TOKEN_UID) + data = bytes.fromhex(json_value) + if len(data) != TOKEN_SIZE: + raise ValueError('TokenUid must either be a null byte or have 32 bytes') + return TokenUid(data) + + @override + def _value_to_json(self, data: TokenUid, /) -> NCType.Json: + if data == HATHOR_TOKEN_UID: + return HATHOR_TOKEN_HEX + else: + return data.hex() diff --git a/hathor/nanocontracts/nc_types/tuple_nc_type.py b/hathor/nanocontracts/nc_types/tuple_nc_type.py new file mode 100644 index 000000000..85f1c1bc7 --- /dev/null +++ b/hathor/nanocontracts/nc_types/tuple_nc_type.py @@ -0,0 +1,120 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +from collections.abc import Iterable +from typing import get_args, get_origin + +from typing_extensions import Self, override + +from hathor.nanocontracts.nc_types.nc_type import NCType +from hathor.serialization import Deserializer, Serializer + + +# XXX: we can't usefully describe the tuple type +class TupleNCType(NCType[tuple]): + """ Represents tuple values, which can either be homogeneous-type variable size or heterogeneous-type fixed size. 
+ """ + + __slots__ = ('_is_hashable', '_varsize', '_args') + + _varsize: bool + # we can't even parametrize NCType, lists are allowed in tuples and it's still hashable it just fails in runtime + _args: tuple[NCType, ...] + + def __init__(self, args: NCType | Iterable[NCType]) -> None: + if isinstance(args, Iterable): + self._varsize = False + self._args = tuple(args) + for arg in self._args: + assert isinstance(arg, NCType) + self._is_hashable = all(arg_nc_type.is_hashable() for arg_nc_type in self._args) + else: + assert isinstance(args, NCType) + self._varsize = True + self._args = (args,) + self._is_hashable = args.is_hashable() + + @override + @classmethod + def _from_type(cls, type_: type[tuple], /, *, type_map: NCType.TypeMap) -> Self: + origin_type: type = get_origin(type_) or type_ + if not issubclass(origin_type, (tuple, list)): + raise TypeError('expected tuple-like type') + args = list(get_args(type_)) + if args is None: + raise TypeError('expected tuple[]') + if issubclass(type_, list): + args.append(Ellipsis) + if args and args[-1] == Ellipsis: + if len(args) != 2: + raise TypeError('ellipsis only allowed with one type: tuple[T, ...]') + arg, _ellipsis = args + return cls(NCType.from_type(arg, type_map=type_map)) + else: + return cls(NCType.from_type(arg, type_map=type_map) for arg in args) + + @override + def _check_value(self, value: tuple, /, *, deep: bool) -> None: + if not isinstance(value, (tuple, list)): + raise TypeError('expected tuple-like') + if deep: + if self._varsize: + arg_nc_type, = self._args + for i in value: + arg_nc_type._check_value(i, deep=True) + else: + if len(value) != len(self._args): + raise TypeError('wrong tuple size') + for i, arg_nc_type in zip(value, self._args): + arg_nc_type._check_value(i, deep=True) + + @override + def _serialize(self, serializer: Serializer, value: tuple, /) -> None: + from hathor.serialization.compound_encoding.collection import encode_collection + from hathor.serialization.compound_encoding.tuple 
import encode_tuple + if self._varsize: + assert len(self._args) == 1 + encode_collection(serializer, value, self._args[0].serialize) + else: + encode_tuple(serializer, value, tuple(i.serialize for i in self._args)) + + @override + def _deserialize(self, deserializer: Deserializer, /) -> tuple: + from hathor.serialization.compound_encoding.collection import decode_collection + from hathor.serialization.compound_encoding.tuple import decode_tuple + if self._varsize: + assert len(self._args) == 1 + return decode_collection(deserializer, self._args[0].deserialize, tuple) + else: + return decode_tuple(deserializer, tuple(i.deserialize for i in self._args)) + + @override + def _json_to_value(self, json_value: NCType.Json, /) -> tuple: + if not isinstance(json_value, list): + raise ValueError('expected list') + if self._varsize: + assert len(self._args) == 1 + return tuple(self._args[0].json_to_value(i) for i in json_value) + else: + return tuple(v.json_to_value(i) for (i, v) in zip(json_value, self._args)) + + @override + def _value_to_json(self, value: tuple, /) -> NCType.Json: + if self._varsize: + assert len(self._args) == 1 + return [self._args[0].value_to_json(i) for i in value] + else: + return [v.value_to_json(i) for (i, v) in zip(value, self._args)] diff --git a/hathor/nanocontracts/nc_types/utils.py b/hathor/nanocontracts/nc_types/utils.py new file mode 100644 index 000000000..48cc9a309 --- /dev/null +++ b/hathor/nanocontracts/nc_types/utils.py @@ -0,0 +1,254 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from collections.abc import Hashable, Mapping +from functools import reduce +from operator import or_ +from types import MappingProxyType as mappingproxy, NoneType, UnionType +# XXX: ignore attr-defined because mypy doesn't recognize it, even though all version of python that we support; have +# this defined, even if it's an internal class +from typing import _UnionGenericAlias # type: ignore[attr-defined] +from typing import TYPE_CHECKING, Iterator, NamedTuple, TypeAlias, TypeVar, Union, cast + +from structlog import get_logger + +from hathor.utils.typing import get_args, get_origin, is_subclass + +if TYPE_CHECKING: + from hathor.nanocontracts.nc_types import NCType + + +logger = get_logger() + +T = TypeVar('T') +TypeAliasMap: TypeAlias = Mapping[type | UnionType, type] +TypeToNCTypeMap: TypeAlias = Mapping[type | UnionType, type['NCType']] + + +def get_origin_classes(type_: type) -> Iterator[type]: + """ This util function is useful to generalize over a type T and unions A | B. + + A simple type T would be yielded directly, and an union will yield each type in it. This way if you need to check a + property either on a type that should be checked for each element in an union, this function simplifies the + process. Also, only origin types are yielded, arguments are discarded, because normally that's what's needed in + those situations. + + It is guaranteed that each yielded type is not an UnionType. + + XXX: on IPython [int] gets represented as '[int]', however on the Python terminal it shows as "" + because that's what's returned by repr/str, so these doctests are formatted in the way that Python would format it. 
+ >>> list(get_origin_classes(int))
+ [<class 'int'>]
+ >>> list(get_origin_classes(int | str))
+ [<class 'int'>, <class 'str'>]
+ >>> list(get_origin_classes(set))
+ [<class 'set'>]
+ >>> list(get_origin_classes(set[int]))
+ [<class 'set'>]
+ >>> list(get_origin_classes(set | dict))
+ [<class 'set'>, <class 'dict'>]
+ >>> list(get_origin_classes(set[int] | dict[int, str]))
+ [<class 'set'>, <class 'dict'>]
+ """ + return all(_is_origin_hashable(origin_class) for origin_class in get_origin_classes(type_)) + + +def _is_origin_hashable(origin_class: type) -> bool: + """ Inner implementation of is_origin_hashable, only checks a single origin class. """ + # XXX: on Python 3.11, `is_subclass(mappingproxy, Hashable) == False`, but on Python 3.12 it's `True`, in practice, + # for all the cases that we support `hash(mapping_proxy_instance)` fails with a `TypeError`, so `False` is the + # most useful result, even if there are technical reasons for why it should be `True` in 3.12 + # XXX: even though mappingproxy is not supported, this behavior is now consistent between different Python versions + if origin_class is mappingproxy: + return False + return is_subclass(origin_class, Hashable) + + +def pretty_type(type_: type | UnionType) -> str: + """ Shows a cleaner string representation for a type. + """ + if type_ is NoneType or type_ is None: + return 'None' + elif hasattr(type_, '__args__'): + return str(type_) + else: + return type_.__name__ + + +# XXX: _verbose argument is used to help with doctest +def get_aliased_type(type_: type | UnionType, alias_map: TypeAliasMap, *, _verbose: bool = True) -> type: + """ Map a type to its usable alias including the type's arguments. 
+ + For example, `set` is mapped to `frozenset` in the default alias map: + + >>> orig_type = tuple[str, frozenset[set[dict[int, set[str]]]], set[int], bool] + >>> from hathor.nanocontracts.nc_types import DEFAULT_TYPE_ALIAS_MAP as alias_map + >>> get_aliased_type(orig_type, alias_map, _verbose=False) + tuple[str, frozenset[frozenset[dict[int, frozenset[str]]]], frozenset[int], bool] + """ + new_type, replaced = _get_aliased_type(type_, alias_map) + if replaced and _verbose: + logger.debug('type replaced', old=pretty_type(type_), new=pretty_type(new_type)) + return new_type + + +def _get_aliased_type(type_: type | UnionType, alias_map: TypeAliasMap) -> tuple[type, bool]: + """ Implementation of get_aliased_type with indication of whether there was a replacement. + """ + origin_type = get_origin(type_) or type_ + # XXX: special case, replace typing.Union with types.UnionType + aliased_origin: type + replaced = False + + if origin_type is Union: + aliased_origin = UnionType + elif origin_type in alias_map: + aliased_origin = alias_map[origin_type] + replaced = True + else: + # XXX: erase UnionType from origin_type, it only gets in the way further on + aliased_origin = cast(type, origin_type) + + if hasattr(type_, '__args__'): + type_args = get_args(type_) + assert isinstance(type_args, tuple) + + # use _get_aliased_type for recursion so we don't warn multiple times when a replacement happens + # aliased_args_replaced is list of [(arg1, replaced1), (arg2, replaced2), ...] + aliased_args_replaced = [_get_aliased_type(arg, alias_map) for arg in type_args] + # unzip the list so we have [arg1, arg2, ...] and [replaced1, replaced2, ...] + aliased_args, args_replaced = zip(*aliased_args_replaced) + # update replaced status + replaced |= any(args_replaced) + + # XXX: special case, UnionType can't be instantiated directly, this is the simplest way to do it + if aliased_origin is UnionType: + final_type = reduce(or_, aliased_args) # = type_args[0] | type_args[1] | ... 
| type_args[N] + # XXX: for some reason, only sometimes doing T | None, results in typing.Union instead of types.UnionType + assert isinstance(final_type, (UnionType, _UnionGenericAlias)), '| of types results in union' + return final_type, replaced + + # XXX: special case, when going from list -> tuple, we need to add an ellipsis, that is to say, the equivalent + # type for `list[T]` is `tuple[T, ...]` + elif isinstance(origin_type, type) and issubclass(origin_type, list) and issubclass(aliased_origin, tuple): + if len(aliased_args) != 1: + raise TypeError('to make an alias from `list` to `tuple` exactly 1 argument is required') + aliased_arg, = aliased_args + return aliased_origin[aliased_arg, ...], replaced # type: ignore[index] + + # normal case when there are type arguments (even if the arguments are empty, like tuple[()]) + # XXX: ignore index because mypy doesn't know aliased_origin is indexable even with the assert + else: + assert hasattr(aliased_origin, '__class_getitem__'), 'we must have an indexable class at this point' + new_type = aliased_origin[*aliased_args] # type: ignore[index] + return new_type, replaced + else: + # normal case when there aren't type arguments + return aliased_origin, replaced + + +def get_usable_origin_type( + type_: type[T] | UnionType, + /, + *, + type_map: 'NCType.TypeMap', + _verbose: bool = True, +) -> type: + """ The purpose of this function is to map a given type into a type that is usable in a NCType.TypeMap + + It takes into account type-aliasing according to NCType.TypeMap.alias_map. If the given type cannot be used in the + given type_map, a TypeError exception will be raised. + + The returned type is such that it is guaranteed to exist in `type_map.nc_types_map`. 
+ + For example, if we have a type `set[int]` it cannot be used to index the default types map, its origin + however, is `dict`, which also isn't in the default map, but after applying the alias it becomes a `frozenset`, + which is in the default map, `get_usable_origin_type` is a shortcut for doing this consistently and also raising a + `TypeError` to indicate that the given type is not supported: + + >>> type_ = set[int] + >>> from hathor.nanocontracts.nc_types import _FIELD_TYPE_MAP as default_type_map + >>> origin = get_usable_origin_type(type_, type_map=default_type_map, _verbose=False) + >>> assert origin in default_type_map.nc_types_map + >>> origin + + """ + if isinstance(type_, str): + raise NotImplementedError('string annotations are not currently supported') + + # if we have a `dict[int, int]` we use `get_origin()` to get the `dict` part, since it's a different instance + aliased_type: type = get_aliased_type(type_, type_map.alias_map, _verbose=_verbose) + origin_aliased_type: type = get_origin(aliased_type) or aliased_type + + if origin_aliased_type in type_map.nc_types_map: + return origin_aliased_type + + if NamedTuple in type_map.nc_types_map and NamedTuple in getattr(type_, '__orig_bases__', tuple()): + return NamedTuple + + raise TypeError(f'type {type_} is not supported by any NCType class') diff --git a/hathor/nanocontracts/nc_types/varint_nc_type.py b/hathor/nanocontracts/nc_types/varint_nc_type.py new file mode 100644 index 000000000..2608f59a5 --- /dev/null +++ b/hathor/nanocontracts/nc_types/varint_nc_type.py @@ -0,0 +1,118 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +from typing import ClassVar + +from typing_extensions import Self, override + +from hathor.nanocontracts.nc_types.nc_type import NCType +from hathor.serialization import Deserializer, Serializer +from hathor.serialization.adapters import MaxBytesExceededError +from hathor.serialization.encoding.leb128 import decode_leb128, encode_leb128 +from hathor.utils.typing import is_subclass + + +class _VarIntNCType(NCType[int]): + _is_hashable = True + # XXX: subclass must define these values: + _signed: ClassVar[bool] + _max_byte_size: ClassVar[int | None] + + @classmethod + def _upper_bound_value(self) -> int | None: + if self._max_byte_size is None: + return None + if self._signed: + return 2**(self._max_byte_size * 7 - 1) - 1 + else: + return 2**(self._max_byte_size * 7) - 1 + + @classmethod + def _lower_bound_value(self) -> int | None: + if not self._signed: + return 0 + if self._max_byte_size is not None: + return -(2**(self._max_byte_size * 7)) + else: + return None + + @override + @classmethod + def _from_type(cls, type_: type[int], /, *, type_map: NCType.TypeMap) -> Self: + if not is_subclass(type_, int): + raise TypeError('expected int type') + return cls() + + @override + def _check_value(self, value: int, /, *, deep: bool) -> None: + if not isinstance(value, int): + raise TypeError('expected integer') + self._check_range(value) + + def _check_range(self, value: int) -> None: + upper_bound = self._upper_bound_value() + lower_bound = self._lower_bound_value() + if upper_bound is not None and value > upper_bound: + 
raise ValueError('above upper bound') + if lower_bound is not None and value < lower_bound: + raise ValueError('below lower bound') + + @override + def _serialize(self, serializer: Serializer, value: int, /) -> None: + if self._max_byte_size is not None: + serializer = serializer.with_max_bytes(self._max_byte_size) + try: + encode_leb128(serializer, value, signed=self._signed) + except MaxBytesExceededError as e: + raise ValueError('value too long') from e + + @override + def _deserialize(self, deserializer: Deserializer, /) -> int: + if self._max_byte_size is not None: + deserializer = deserializer.with_max_bytes(self._max_byte_size) + try: + value = decode_leb128(deserializer, signed=self._signed) + except MaxBytesExceededError as e: + raise ValueError('value too long') from e + return value + + @override + def _json_to_value(self, json_value: NCType.Json, /) -> int: + # XXX: should we drop support for int? + if not isinstance(json_value, (int, str)): + raise ValueError('expected int or str') + return int(json_value) + + @override + def _value_to_json(self, value: int, /) -> NCType.Json: + # XXX: should we use str instead? + return value + + +class VarInt32NCType(_VarIntNCType): + """Variable-size signed integer with at most 32 bytes, effectively 223 bits + sign bit. + """ + + _signed = True + _max_byte_size = 32 + + +class VarUint32NCType(_VarIntNCType): + """Variable-size unsigned integer with at most 32 bytes. + """ + + _signed = False + _max_byte_size = 32 diff --git a/hathor/nanocontracts/on_chain_blueprint.py b/hathor/nanocontracts/on_chain_blueprint.py new file mode 100644 index 000000000..0eef02261 --- /dev/null +++ b/hathor/nanocontracts/on_chain_blueprint.py @@ -0,0 +1,408 @@ +# Copyright 2024 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +import ast +import zlib +from dataclasses import InitVar, dataclass, field +from enum import IntEnum, unique +from typing import TYPE_CHECKING, Any, Optional + +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric import ec +from structlog import get_logger +from typing_extensions import Self, override + +from hathor.conf.get_settings import get_global_settings +from hathor.crypto.util import get_address_b58_from_public_key_bytes, get_public_key_bytes_compressed +from hathor.nanocontracts.blueprint import Blueprint +from hathor.nanocontracts.exception import OCBOutOfFuelDuringLoading, OCBOutOfMemoryDuringLoading +from hathor.nanocontracts.method import Method +from hathor.nanocontracts.types import BlueprintId, blueprint_id_from_bytes +from hathor.transaction import Transaction, TxInput, TxOutput, TxVersion +from hathor.transaction.util import VerboseCallback, int_to_bytes, unpack, unpack_len + +if TYPE_CHECKING: + from hathor.conf.settings import HathorSettings + from hathor.nanocontracts.storage import NCContractStorage # noqa: F401 + from hathor.transaction.storage import TransactionStorage # noqa: F401 + +logger = get_logger() + +# used to allow new versions of the serialization format in the future +ON_CHAIN_BLUEPRINT_VERSION: int = 1 + +# this is the name we expect the source code to expose for the Blueprint class +BLUEPRINT_CLASS_NAME: str = '__blueprint__' + +# source compatibility with Python 3.11 +PYTHON_CODE_COMPAT_VERSION = (3, 11) + +# max compression level, 
used as default +MAX_COMPRESSION_LEVEL = 9 + +# this is what's allowed to be imported, to be checked in the AST and in runtime +ALLOWED_IMPORTS: dict[str, set[str]] = { + # globals + 'math': {'ceil', 'floor'}, + 'typing': {'Optional', 'NamedTuple', 'TypeAlias', 'Union'}, + 'collections': {'OrderedDict'}, + # hathor + 'hathor.nanocontracts': {'Blueprint'}, + 'hathor.nanocontracts.blueprint': {'Blueprint'}, + 'hathor.nanocontracts.context': {'Context'}, + 'hathor.nanocontracts.exception': {'NCFail'}, + 'hathor.nanocontracts.types': { + 'NCAction', + 'NCActionType', + 'SignedData', + 'public', + 'view', + 'fallback', + 'Address', + 'Amount', + 'Timestamp', + 'TokenUid', + 'TxOutputScript', + 'BlueprintId', + 'ContractId', + 'VertexId', + 'NCDepositAction', + 'NCWithdrawalAction', + 'NCGrantAuthorityAction', + 'NCAcquireAuthorityAction', + 'NCArgs', + 'NCRawArgs', + 'NCParsedArgs', + }, +} + +# these names aren't allowed in the code, to be checked in the AST only +AST_NAME_BLACKLIST: set[str] = { + '__builtins__', + '__import__', + 'compile', + 'delattr', + 'dir', + 'eval', + 'exec', + 'getattr', + 'globals', + 'hasattr', + 'input', + 'locals', + 'open', + 'setattr', + 'vars', + 'type', + 'object', + 'super', +} + + +@unique +class CodeKind(IntEnum): + """ Represents what type of code and format is being used, to allow new code/compression types in the future. 
+ """ + + PYTHON_ZLIB = 1 + + def __bytes__(self) -> bytes: + return int_to_bytes(number=self.value, size=1) + + +def _compress_code(content: str, compress_level: int) -> bytes: + # XXX: zlib is gzip compatible and compresses slightly better + return zlib.compress(content.encode('utf-8'), level=compress_level) + + +def _decompress_code(data: bytes, max_length: int) -> str: + dcobj = zlib.decompressobj() + content = dcobj.decompress(data, max_length=max_length) + if dcobj.unconsumed_tail: + raise ValueError('Decompressed code is too long.') + return content.decode('utf-8') + + +@dataclass(frozen=True) +class Code: + """ Store the code object in memory, along with helper methods. + """ + + # determines how the content will be interpreted + kind: CodeKind + + # the encoded content, usually encoded implies compressed + data: bytes + + # pre-decompressed content, for faster access + text: str = field(init=False) + + # only needed for initialization, to decompress the original data + settings: InitVar[HathorSettings] + + def __post_init__(self, settings: HathorSettings) -> None: + # used to initialize self.text with + match self.kind: + case CodeKind.PYTHON_ZLIB: + text = _decompress_code(self.data, settings.NC_ON_CHAIN_BLUEPRINT_CODE_MAX_SIZE_UNCOMPRESSED) + # set self.text using object.__setattr__ to bypass frozen protection + object.__setattr__(self, 'text', text) + case _: + raise ValueError('Invalid code kind value') + + def __bytes__(self) -> bytes: + # Code serialization format: [kind:variable bytes][null byte][data:variable bytes] + if self.kind is not CodeKind.PYTHON_ZLIB: + raise ValueError('Invalid code kind value') + buf = bytearray() + buf.extend(bytes(self.kind)) + buf.extend(self.data) + return bytes(buf) + + @classmethod + def from_bytes(cls, data: bytes, settings: HathorSettings) -> Self: + """ Parses a Code instance from a byte sequence, the length of the data is encoded outside of this class. 
+ + NOTE: This will not validate whether the encoded has a valid compression format. A Validator must be used to + check that. + """ + data = bytearray(data) + kind = CodeKind(data[0]) + if kind is not CodeKind.PYTHON_ZLIB: + raise ValueError('Code kind not supported') + compressed_code = data[1:] + return cls(kind, compressed_code, settings) + + @classmethod + def from_python_code( + cls, + text_code: str, + settings: HathorSettings, + *, + compress_level: int = MAX_COMPRESSION_LEVEL, + ) -> Self: + data = _compress_code(text_code, compress_level) + return cls(CodeKind.PYTHON_ZLIB, data, settings) + + def to_json(self) -> dict[str, Any]: + """ Simple json view.""" + import base64 + return { + 'kind': self.kind.value, + 'content': base64.b64encode(self.data).decode('ascii'), + } + + def to_json_extended(self) -> dict[str, Any]: + """ Extended json view, includes content in text form.""" + return { + **self.to_json(), + 'content_text': self.text, + } + + +class OnChainBlueprint(Transaction): + """On-chain blueprint vertex to be placed on the DAG of transactions.""" + + MIN_NUM_INPUTS = 0 + + def __init__( + self, + nonce: int = 0, + timestamp: Optional[int] = None, + version: TxVersion = TxVersion.ON_CHAIN_BLUEPRINT, + weight: float = 0, + inputs: Optional[list[TxInput]] = None, + outputs: Optional[list[TxOutput]] = None, + parents: Optional[list[bytes]] = None, + tokens: Optional[list[bytes]] = None, + code: Optional[Code] = None, + hash: Optional[bytes] = None, + storage: Optional['TransactionStorage'] = None, + ) -> None: + super().__init__(nonce=nonce, timestamp=timestamp, version=version, weight=weight, inputs=inputs, + outputs=outputs or [], tokens=tokens, parents=parents or [], hash=hash, storage=storage) + + self._settings = get_global_settings() + if not self._settings.ENABLE_NANO_CONTRACTS: + raise RuntimeError('NanoContracts are disabled') + + # Pubkey and signature of the transaction owner / caller. 
+ self.nc_pubkey: bytes = b'' + self.nc_signature: bytes = b'' + + self.code: Code = code if code is not None else Code(CodeKind.PYTHON_ZLIB, b'', self._settings) + self._ast_cache: Optional[ast.Module] = None + self._blueprint_loaded_env: Optional[tuple[type[Blueprint], dict[str, object]]] = None + + def blueprint_id(self) -> BlueprintId: + """The blueprint's contract-id is it's own tx-id, this helper method just converts to the right type.""" + return blueprint_id_from_bytes(self.hash) + + def _load_blueprint_code_exec(self) -> tuple[object, dict[str, object]]: + """XXX: DO NOT CALL THIS METHOD UNLESS YOU REALLY KNOW WHAT IT DOES.""" + from hathor.nanocontracts.metered_exec import MeteredExecutor, OutOfFuelError, OutOfMemoryError + fuel = self._settings.NC_INITIAL_FUEL_TO_LOAD_BLUEPRINT_MODULE + memory_limit = self._settings.NC_MEMORY_LIMIT_TO_LOAD_BLUEPRINT_MODULE + metered_executor = MeteredExecutor(fuel=fuel, memory_limit=memory_limit) + try: + env = metered_executor.exec(self.code.text) + except OutOfFuelError as e: + self.log.error('loading blueprint module failed, fuel limit exceeded') + raise OCBOutOfFuelDuringLoading from e + except OutOfMemoryError as e: + self.log.error('loading blueprint module failed, memory limit exceeded') + raise OCBOutOfMemoryDuringLoading from e + blueprint_class = env[BLUEPRINT_CLASS_NAME] + return blueprint_class, env + + def _load_blueprint_code(self) -> tuple[type[Blueprint], dict[str, object]]: + """This method loads the on-chain code (if not loaded) and returns the blueprint class and env.""" + if self._blueprint_loaded_env is None: + blueprint_class, env = self._load_blueprint_code_exec() + assert isinstance(blueprint_class, type) + assert issubclass(blueprint_class, Blueprint) + self._blueprint_loaded_env = blueprint_class, env + return self._blueprint_loaded_env + + def get_blueprint_object_bypass(self) -> object: + """Loads the code and returns the object defined in __blueprint__""" + blueprint_class, _ = 
self._load_blueprint_code_exec() + return blueprint_class + + def get_blueprint_class(self) -> type[Blueprint]: + """Returns the blueprint class, loads and executes the code as needed.""" + blueprint_class, _ = self._load_blueprint_code() + return blueprint_class + + def serialize_code(self) -> bytes: + """Serialization of self.code, to be used for the serialization of this transaction type.""" + buf = bytearray() + buf.extend(int_to_bytes(ON_CHAIN_BLUEPRINT_VERSION, 1)) + serialized_code = bytes(self.code) + buf.extend(int_to_bytes(len(serialized_code), 4)) + buf.extend(serialized_code) + return bytes(buf) + + @classmethod + def deserialize_code(_cls, buf: bytes, *, verbose: VerboseCallback = None) -> tuple[Code, bytes]: + """Parses the self.code field, returns the parse result and the remaining bytes.""" + settings = get_global_settings() + + (ocb_version,), buf = unpack('!B', buf) + if verbose: + verbose('ocb_version', ocb_version) + if ocb_version != ON_CHAIN_BLUEPRINT_VERSION: + raise ValueError(f'unknown on-chain blueprint version: {ocb_version}') + + (serialized_code_len,), buf = unpack('!L', buf) + if verbose: + verbose('serialized_code_len', serialized_code_len) + max_serialized_code_len = settings.NC_ON_CHAIN_BLUEPRINT_CODE_MAX_SIZE_COMPRESSED + if serialized_code_len > max_serialized_code_len: + raise ValueError(f'compressed code data is too large: {serialized_code_len} > {max_serialized_code_len}') + serialized_code, buf = unpack_len(serialized_code_len, buf) + if verbose: + verbose('serialized_code', serialized_code) + code = Code.from_bytes(serialized_code, settings) + return code, buf + + def _serialize_ocb(self, *, skip_signature: bool = False) -> bytes: + buf = bytearray() + buf += self.serialize_code() + buf += int_to_bytes(len(self.nc_pubkey), 1) + buf += self.nc_pubkey + if not skip_signature: + buf += int_to_bytes(len(self.nc_signature), 1) + buf += self.nc_signature + else: + buf += int_to_bytes(0, 1) + return bytes(buf) + + @override + def 
get_funds_struct(self) -> bytes: + struct_bytes = super().get_funds_struct() + struct_bytes += self._serialize_ocb() + return struct_bytes + + @override + def get_sighash_all(self, *, skip_cache: bool = False) -> bytes: + if not skip_cache and self._sighash_cache: + return self._sighash_cache + struct_bytes = super().get_sighash_all(skip_cache=True) + struct_bytes += self._serialize_ocb(skip_signature=True) + self._sighash_cache = struct_bytes + return struct_bytes + + @override + def get_funds_fields_from_struct(self, buf: bytes, *, verbose: VerboseCallback = None) -> bytes: + buf = super().get_funds_fields_from_struct(buf, verbose=verbose) + + code, buf = OnChainBlueprint.deserialize_code(buf, verbose=verbose) + self.code = code + + (nc_pubkey_len,), buf = unpack('!B', buf) + if verbose: + verbose('nc_pubkey_len', nc_pubkey_len) + self.nc_pubkey, buf = unpack_len(nc_pubkey_len, buf) + if verbose: + verbose('nc_pubkey', self.nc_pubkey) + (nc_signature_len,), buf = unpack('!B', buf) + if verbose: + verbose('nc_signature_len', nc_signature_len) + self.nc_signature, buf = unpack_len(nc_signature_len, buf) + if verbose: + verbose('nc_signature', self.nc_signature) + + return buf + + @override + def to_json(self, decode_script: bool = False, include_metadata: bool = False) -> dict[str, Any]: + return { + **super().to_json(decode_script=decode_script, include_metadata=include_metadata), + 'on_chain_blueprint_code': self.code.to_json(), + 'nc_pubkey': self.nc_pubkey.hex(), + } + + @override + def to_json_extended(self) -> dict[str, Any]: + return { + **super().to_json_extended(), + 'on_chain_blueprint_code': self.code.to_json_extended(), + 'nc_pubkey': self.nc_pubkey.hex(), + 'nc_signature': self.nc_signature.hex(), + } + + @override + def get_minimum_number_of_inputs(self) -> int: + return 0 + + def get_method(self, method_name: str) -> Method: + # XXX: possibly do this by analyzing the source AST instead of using the loaded code + blueprint_class = 
self.get_blueprint_class() + return Method.from_callable(getattr(blueprint_class, method_name)) + + def sign(self, private_key: ec.EllipticCurvePrivateKey) -> None: + """Sign this blueprint with the provided private key.""" + pubkey = private_key.public_key() + self.nc_pubkey = get_public_key_bytes_compressed(pubkey) + data = self.get_sighash_all_data() + self.nc_signature = private_key.sign(data, ec.ECDSA(hashes.SHA256())) + + def get_related_addresses(self) -> set[str]: + """Besides the common tx related addresses, we must also add the nc_pubkey.""" + ret = super().get_related_addresses() + ret.add(get_address_b58_from_public_key_bytes(self.nc_pubkey)) + return ret diff --git a/hathor/nanocontracts/resources/__init__.py b/hathor/nanocontracts/resources/__init__.py new file mode 100644 index 000000000..5bb0b1119 --- /dev/null +++ b/hathor/nanocontracts/resources/__init__.py @@ -0,0 +1,31 @@ +# Copyright 2021 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from hathor.nanocontracts.resources.blueprint import BlueprintInfoResource +from hathor.nanocontracts.resources.blueprint_source_code import BlueprintSourceCodeResource +from hathor.nanocontracts.resources.builtin import BlueprintBuiltinResource +from hathor.nanocontracts.resources.history import NanoContractHistoryResource +from hathor.nanocontracts.resources.nc_creation import NCCreationResource +from hathor.nanocontracts.resources.on_chain import BlueprintOnChainResource +from hathor.nanocontracts.resources.state import NanoContractStateResource + +__all__ = [ + 'BlueprintBuiltinResource', + 'BlueprintInfoResource', + 'BlueprintOnChainResource', + 'BlueprintSourceCodeResource', + 'NanoContractStateResource', + 'NanoContractHistoryResource', + 'NCCreationResource', +] diff --git a/hathor/nanocontracts/resources/blueprint.py b/hathor/nanocontracts/resources/blueprint.py new file mode 100644 index 000000000..242f3beab --- /dev/null +++ b/hathor/nanocontracts/resources/blueprint.py @@ -0,0 +1,251 @@ +# Copyright 2022 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import builtins +import inspect +import types +import typing +from typing import TYPE_CHECKING, Any, Optional + +from hathor.api_util import Resource, set_cors +from hathor.cli.openapi_files.register import register_resource +from hathor.nanocontracts import types as nc_types +from hathor.nanocontracts.blueprint import NC_FIELDS_ATTR +from hathor.nanocontracts.context import Context +from hathor.nanocontracts.exception import BlueprintDoesNotExist +from hathor.nanocontracts.types import blueprint_id_from_bytes +from hathor.nanocontracts.utils import is_nc_public_method, is_nc_view_method +from hathor.utils.api import ErrorResponse, QueryParams, Response +from hathor.utils.typing import get_args, get_origin + +if TYPE_CHECKING: + from twisted.web.http import Request + + from hathor.manager import HathorManager + + +@register_resource +class BlueprintInfoResource(Resource): + """Implements a GET API to return information about a blueprint.""" + isLeaf = True + + def __init__(self, manager: 'HathorManager'): + self.manager = manager + + def _get_composed_type_name(self, type_name: str, args: tuple[Any, ...]) -> str: + subtypes = ', '.join([self.get_type_name(x) for x in args]) + return f'{type_name}[{subtypes}]' + + def _get_optional_type_name(self, arg: Any) -> str: + subtype = self.get_type_name(arg) + return f'{subtype}?' 
+ + def get_type_name(self, type_: type) -> str: + """Return a string representation for `type_`.""" + origin = get_origin(type_) or type_ + args = get_args(type_) or tuple() + + if (type_ is type(None)) or (type_ is None): # noqa: E721 + return 'null' + + match origin: + case builtins.dict | builtins.tuple | builtins.list | builtins.set: + return self._get_composed_type_name(origin.__name__, args) + case typing.Union | types.UnionType: + match args: + case (_subtype, types.NoneType) | (types.NoneType, _subtype): + return self._get_optional_type_name(_subtype) + return self._get_composed_type_name('union', args) + case nc_types.SignedData: + return self._get_composed_type_name('SignedData', args) + + return type_.__name__ + + def render_GET(self, request: 'Request') -> bytes: + request.setHeader(b'content-type', b'application/json; charset=utf-8') + set_cors(request, 'GET') + + params = BlueprintInfoParams.from_request(request) + if isinstance(params, ErrorResponse): + request.setResponseCode(400) + return params.json_dumpb() + + try: + blueprint_id = blueprint_id_from_bytes(bytes.fromhex(params.blueprint_id)) + except ValueError: + request.setResponseCode(400) + error_response = ErrorResponse(success=False, error=f'Invalid id: {params.blueprint_id}') + return error_response.json_dumpb() + + try: + blueprint_class = self.manager.tx_storage.get_blueprint_class(blueprint_id) + except BlueprintDoesNotExist: + request.setResponseCode(404) + error_response = ErrorResponse(success=False, error=f'Blueprint not found: {params.blueprint_id}') + return error_response.json_dumpb() + + attributes: dict[str, str] = {} + fields = getattr(blueprint_class, NC_FIELDS_ATTR) + for name, _type in fields.items(): + assert name not in attributes + attributes[name] = self.get_type_name(_type) + + public_methods = {} + view_methods = {} + skip_methods = {'__init__'} + for name, method in inspect.getmembers(blueprint_class, predicate=inspect.isfunction): + if name in skip_methods: + 
continue + + if not (is_nc_public_method(method) or is_nc_view_method(method)): + continue + + method_args = [] + argspec = inspect.getfullargspec(method) + for arg_name in argspec.args[1:]: + arg_type = argspec.annotations[arg_name] + if arg_type is Context: + continue + method_args.append(MethodArgInfo( + name=arg_name, + type=self.get_type_name(arg_type), + )) + + return_type = argspec.annotations.get('return', None) + + method_info = MethodInfo( + args=method_args, + return_type=self.get_type_name(return_type), + docstring=inspect.getdoc(method), + ) + + if is_nc_public_method(method): + assert name not in public_methods + public_methods[name] = method_info + + if is_nc_view_method(method): + assert name not in view_methods + view_methods[name] = method_info + + response = BlueprintInfoResponse( + id=params.blueprint_id, + name=blueprint_class.__name__, + attributes=attributes, + public_methods=public_methods, + private_methods=view_methods, # DEPRECATED + view_methods=view_methods, + docstring=inspect.getdoc(blueprint_class), + ) + return response.json_dumpb() + + +class BlueprintInfoParams(QueryParams): + blueprint_id: str + + +class MethodArgInfo(Response): + name: str + type: str + + +class MethodInfo(Response): + args: list[MethodArgInfo] + return_type: Optional[str] + docstring: str | None + + +class BlueprintInfoResponse(Response): + id: str + name: str + attributes: dict[str, str] + public_methods: dict[str, MethodInfo] + private_methods: dict[str, MethodInfo] # DEPRECATED + view_methods: dict[str, MethodInfo] + docstring: str | None + + +BlueprintInfoResource.openapi = { + '/nano_contract/blueprint/info': { + 'x-visibility': 'public', + 'x-rate-limit': { + 'global': [ + { + 'rate': '100r/s', + 'burst': 100, + 'delay': 100 + } + ], + 'per-ip': [ + { + 'rate': '3r/s', + 'burst': 10, + 'delay': 3 + } + ] + }, + 'get': { + 'operationId': 'blueprint-info', + 'summary': 'Return information about a blueprint', + 'responses': { + '200': { + 'description': 
'Success', + 'content': { + 'application/json': { + 'examples': { + 'success': { + 'summary': 'Success', + 'value': { + 'id': '3cb032600bdf7db784800e4ea911b10676fa2f67591f82bb62628c234e771595', + 'name': 'Bet', + 'attributes': { + 'total_bets': 'int', + }, + 'public_methods': { + 'initialize': { + 'args': [{ + 'name': 'oracle_script', + 'type': 'bytes' + }], + 'return_type': 'null' + }, + 'bet': { + 'args': [{ + 'name': 'address', + 'type': 'bytes', + }, { + 'name': 'score', + 'type': 'str' + }], + 'return_type': 'null' + }, + }, + 'view_methods': { + 'get_winner_amount': { + 'args': [{ + 'name': 'address', + 'type': 'bytes' + }], + 'return_type': 'int' + }, + } + } + } + } + } + } + } + } + } + } +} diff --git a/hathor/nanocontracts/resources/blueprint_source_code.py b/hathor/nanocontracts/resources/blueprint_source_code.py new file mode 100644 index 000000000..193790a0c --- /dev/null +++ b/hathor/nanocontracts/resources/blueprint_source_code.py @@ -0,0 +1,124 @@ +# Copyright 2022 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from typing import TYPE_CHECKING + +from hathor.api_util import Resource, set_cors +from hathor.cli.openapi_files.register import register_resource +from hathor.nanocontracts.exception import BlueprintDoesNotExist, OCBBlueprintNotConfirmed +from hathor.nanocontracts.types import blueprint_id_from_bytes +from hathor.utils.api import ErrorResponse, QueryParams, Response + +if TYPE_CHECKING: + from twisted.web.http import Request + + from hathor.manager import HathorManager + + +@register_resource +class BlueprintSourceCodeResource(Resource): + """Implements a GET API to return the source code of a blueprint.""" + isLeaf = True + + def __init__(self, manager: 'HathorManager'): + self.manager = manager + + def render_GET(self, request: 'Request') -> bytes: + request.setHeader(b'content-type', b'application/json; charset=utf-8') + set_cors(request, 'GET') + + params = BlueprintSourceCodeParams.from_request(request) + if isinstance(params, ErrorResponse): + request.setResponseCode(400) + return params.json_dumpb() + + try: + blueprint_id = blueprint_id_from_bytes(bytes.fromhex(params.blueprint_id)) + except ValueError: + request.setResponseCode(400) + error_response = ErrorResponse(success=False, error=f'Invalid id: {params.blueprint_id}') + return error_response.json_dumpb() + + assert self.manager.tx_storage.nc_catalog is not None + + try: + blueprint_source = self.manager.tx_storage.get_blueprint_source(blueprint_id) + except OCBBlueprintNotConfirmed: + request.setResponseCode(404) + error_response = ErrorResponse(success=False, error=f'Blueprint not confirmed: {params.blueprint_id}') + return error_response.json_dumpb() + except BlueprintDoesNotExist: + request.setResponseCode(404) + error_response = ErrorResponse(success=False, error=f'Blueprint not found: {params.blueprint_id}') + return error_response.json_dumpb() + + response = BlueprintSourceCodeResponse( + id=params.blueprint_id, + source_code=blueprint_source, + ) + return response.json_dumpb() + + +class 
BlueprintSourceCodeParams(QueryParams): + blueprint_id: str + + +class BlueprintSourceCodeResponse(Response): + id: str + source_code: str + + +BlueprintSourceCodeResource.openapi = { + '/nano_contract/blueprint/source': { + 'x-visibility': 'public', + 'x-rate-limit': { + 'global': [ + { + 'rate': '5r/s', + 'burst': 8, + 'delay': 3 + } + ], + 'per-ip': [ + { + 'rate': '2r/s', + 'burst': 4, + 'delay': 3 + } + ] + }, + 'get': { + 'operationId': 'blueprint-source-code', + 'summary': 'Return source code of a blueprint', + 'responses': { + '200': { + 'description': 'Success', + 'content': { + 'application/json': { + 'examples': { + 'success': { + 'summary': 'Success', + 'value': { + 'id': '3cb032600bdf7db784800e4ea911b10676fa2f67591f82bb62628c234e771595', + 'source_code': 'def f(arg1: str):\nreturn arg1 + 2', + } + } + } + } + } + } + } + } + } +} diff --git a/hathor/nanocontracts/resources/builtin.py b/hathor/nanocontracts/resources/builtin.py new file mode 100644 index 000000000..a75abe90b --- /dev/null +++ b/hathor/nanocontracts/resources/builtin.py @@ -0,0 +1,214 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from typing import Iterator + +from pydantic import Field +from sortedcontainers import SortedKeyList +from twisted.web.http import Request + +from hathor.api_util import Resource, set_cors +from hathor.cli.openapi_files.register import register_resource +from hathor.manager import HathorManager +from hathor.nanocontracts import Blueprint +from hathor.util import collect_n +from hathor.utils.api import ErrorResponse, QueryParams, Response + + +@register_resource +class BlueprintBuiltinResource(Resource): + """Implements a GET API to return a list of builtin blueprints.""" + isLeaf = True + + def __init__(self, manager: HathorManager) -> None: + super().__init__() + self.manager = manager + + def render_GET(self, request: Request) -> bytes: + request.setHeader(b'content-type', b'application/json; charset=utf-8') + set_cors(request, 'GET') + + params = BuiltinBlueprintsParams.from_request(request) + if isinstance(params, ErrorResponse): + request.setResponseCode(400) + return params.json_dumpb() + + if params.after and params.before: + request.setResponseCode(400) + error_response = ErrorResponse( + success=False, error='Parameters after and before can\'t be used together.') + return error_response.json_dumpb() + + assert self.manager.tx_storage.nc_catalog is not None + builtin_bps = list(self.manager.tx_storage.nc_catalog.blueprints.items()) + + filtered_bps = builtin_bps + if params.search: + search = params.search.strip().lower() + # first we try to find by blueprint ID + filtered_bps = [ + (bp_id, bp_class) for bp_id, bp_class in builtin_bps + if bp_id.hex().lower() == search + ] + + if filtered_bps: + # If we find the Blueprint, it's a single match, and any pagination returns empty. 
+ assert len(filtered_bps) == 1 + if params.after or params.before: + filtered_bps = [] + else: + # If we didn't find it, we'll try by name + filtered_bps = [ + (bp_id, bp_class) for bp_id, bp_class in builtin_bps + if search in bp_class.__name__.lower() + ] + + sorted_bps = SortedKeyList(filtered_bps, key=lambda bp_id_and_class: bp_id_and_class[0]) + reverse = bool(params.before) + start_key = bytes.fromhex(params.before or params.after or '') or None + bp_iter: Iterator[tuple[bytes, type[Blueprint]]] = sorted_bps.irange_key( + min_key=None if reverse else start_key, + max_key=start_key if reverse else None, + reverse=reverse, + inclusive=(False, False), + ) + page, has_more = collect_n(bp_iter, params.count) + + blueprints = [ + BuiltinBlueprintItem(id=bp_id.hex(), name=bp_class.__name__) + for bp_id, bp_class in page + ] + + response = BuiltinBlueprintsResponse( + before=params.before, + after=params.after, + count=params.count, + has_more=has_more, + blueprints=blueprints, + ) + return response.json_dumpb() + + +class BuiltinBlueprintsParams(QueryParams, use_enum_values=True): + before: str | None + after: str | None + count: int = Field(default=10, gt=0, le=100) + search: str | None = None + + +class BuiltinBlueprintItem(Response): + id: str + name: str + + +class BuiltinBlueprintsResponse(Response): + success: bool = Field(default=True, const=True) + blueprints: list[BuiltinBlueprintItem] + before: str | None + after: str | None + count: int + has_more: bool + + +BlueprintBuiltinResource.openapi = { + '/nano_contract/blueprint/builtin': { + 'x-visibility': 'public', + 'x-rate-limit': { + 'global': [ + { + 'rate': '100r/s', + 'burst': 100, + 'delay': 100 + } + ], + 'per-ip': [ + { + 'rate': '3r/s', + 'burst': 10, + 'delay': 3 + } + ] + }, + 'get': { + 'operationId': 'builtin-blueprints', + 'summary': 'Return a list of builtin blueprints', + 'parameters': [ + { + 'name': 'before', + 'in': 'query', + 'description': 'Hash of transaction to offset the result 
before.', + 'required': False, + 'schema': { + 'type': 'string', + } + }, + { + 'name': 'after', + 'in': 'query', + 'description': 'Hash of transaction to offset the result after.', + 'required': False, + 'schema': { + 'type': 'string', + } + }, + { + 'name': 'count', + 'in': 'query', + 'description': 'Maximum number of items to be returned. Default is 10.', + 'required': False, + 'schema': { + 'type': 'int', + } + }, + { + 'name': 'search', + 'in': 'query', + 'description': 'Filter the list using the provided string, that could be a Blueprint ID or name.', + 'required': False, + 'schema': { + 'type': 'string', + } + }, + ], + 'responses': { + '200': { + 'description': 'Success', + 'content': { + 'application/json': { + 'examples': { + 'success': { + 'summary': 'Success', + 'value': { + 'success': True, + 'before': None, + 'after': None, + 'count': 10, + 'has_more': False, + 'blueprints': [ + { + 'id': '3cb032600bdf7db784800e4ea911b106' + '76fa2f67591f82bb62628c234e771595', + 'name': 'Bet' + } + ], + } + } + } + } + } + } + } + } + } +} diff --git a/hathor/nanocontracts/resources/history.py b/hathor/nanocontracts/resources/history.py new file mode 100644 index 000000000..9e10bfd35 --- /dev/null +++ b/hathor/nanocontracts/resources/history.py @@ -0,0 +1,265 @@ +# Copyright 2021 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from typing import TYPE_CHECKING, Any, Optional + +from pydantic import Field + +from hathor.api_util import Resource, set_cors +from hathor.cli.openapi_files.register import register_resource +from hathor.nanocontracts.exception import NanoContractDoesNotExist +from hathor.transaction.storage.exceptions import TransactionDoesNotExist +from hathor.utils.api import ErrorResponse, QueryParams, Response + +if TYPE_CHECKING: + from twisted.web.http import Request + + from hathor.manager import HathorManager + + +@register_resource +class NanoContractHistoryResource(Resource): + """ Implements a web server GET API to get a nano contract history. + You must run with option `--status `. + """ + isLeaf = True + + def __init__(self, manager: 'HathorManager'): + self.manager = manager + + def render_GET(self, request: 'Request') -> bytes: + request.setHeader(b'content-type', b'application/json; charset=utf-8') + set_cors(request, 'GET') + + tx_storage = self.manager.tx_storage + assert tx_storage.indexes is not None + if tx_storage.indexes.nc_history is None: + request.setResponseCode(503) + error_response = ErrorResponse(success=False, error='Nano contract history index not initialized') + return error_response.json_dumpb() + + params = NCHistoryParams.from_request(request) + if isinstance(params, ErrorResponse): + request.setResponseCode(400) + return params.json_dumpb() + + if params.after and params.before: + request.setResponseCode(400) + error_response = ErrorResponse(success=False, error='Parameters after and before can\'t be used together.') + return error_response.json_dumpb() + + try: + nc_id_bytes = bytes.fromhex(params.id) + except ValueError: + request.setResponseCode(400) + error_response = ErrorResponse(success=False, error=f'Invalid id: {params.id}') + return error_response.json_dumpb() + + # Check if the contract exists. 
+ try: + self.manager.get_best_block_nc_storage(nc_id_bytes) + except NanoContractDoesNotExist: + request.setResponseCode(404) + error_response = ErrorResponse(success=False, error='Nano contract does not exist.') + return error_response.json_dumpb() + + if params.after: + try: + ref_tx = tx_storage.get_transaction(bytes.fromhex(params.after)) + except TransactionDoesNotExist: + request.setResponseCode(400) + error_response = ErrorResponse(success=False, error=f'Hash {params.after} is not a transaction hash.') + return error_response.json_dumpb() + + iter_history = iter(tx_storage.indexes.nc_history.get_older(nc_id_bytes, ref_tx)) + # This method returns the iterator including the tx used as `after` + next(iter_history) + elif params.before: + try: + ref_tx = tx_storage.get_transaction(bytes.fromhex(params.before)) + except TransactionDoesNotExist: + request.setResponseCode(400) + error_response = ErrorResponse(success=False, error=f'Hash {params.before} is not a transaction hash.') + return error_response.json_dumpb() + + iter_history = iter(tx_storage.indexes.nc_history.get_newer(nc_id_bytes, ref_tx)) + # This method returns the iterator including the tx used as `before` + next(iter_history) + else: + iter_history = iter(tx_storage.indexes.nc_history.get_newest(nc_id_bytes)) + + count = params.count + has_more = False + history_list = [] + for idx, tx_id in enumerate(iter_history): + history_list.append(tx_storage.get_transaction(tx_id).to_json_extended()) + if idx >= count - 1: + # Check if iterator still has more elements + try: + next(iter_history) + has_more = True + except StopIteration: + has_more = False + break + + response = NCHistoryResponse( + success=True, + count=count, + after=params.after, + before=params.before, + history=history_list, + has_more=has_more, + ) + return response.json_dumpb() + + +class NCHistoryParams(QueryParams): + id: str + after: Optional[str] + before: Optional[str] + count: int = Field(default=100, lt=500) + + +class 
NCHistoryResponse(Response): + success: bool + count: int + after: Optional[str] + before: Optional[str] + history: list[dict[str, Any]] + has_more: bool + + +openapi_history_response = { + 'hash': '5c02adea056d7b43e83171a0e2d226d564c791d583b32e9a404ef53a2e1b363a', + 'nonce': 0, + 'timestamp': 1572636346, + 'version': 4, + 'weight': 1, + 'signal_bits': 0, + 'parents': ['1234', '5678'], + 'inputs': [], + 'outputs': [], + 'metadata': { + 'hash': '5c02adea056d7b43e83171a0e2d226d564c791d583b32e9a404ef53a2e1b363a', + 'spent_outputs': [], + 'received_by': [], + 'children': [], + 'conflict_with': [], + 'voided_by': [], + 'twins': [], + 'accumulated_weight': 1, + 'score': 0, + 'height': 0, + 'min_height': 0, + 'feature_activation_bit_counts': None, + 'first_block': None, + 'validation': 'full' + }, + 'tokens': [], + 'nc_id': '5c02adea056d7b43e83171a0e2d226d564c791d583b32e9a404ef53a2e1b363a', + 'nc_method': 'initialize', + 'nc_args': '0004313233340001000004654d8749', + 'nc_pubkey': '033f5d238afaa9e2218d05dd7fa50eb6f9e55431e6359e04b861cd991ae24dc655' +} + + +NanoContractHistoryResource.openapi = { + '/nano_contract/history': { + 'x-visibility': 'public', + 'x-rate-limit': { + 'global': [ + { + 'rate': '3r/s', + 'burst': 10, + 'delay': 3 + } + ], + 'per-ip': [ + { + 'rate': '1r/s', + 'burst': 4, + 'delay': 2 + } + ] + }, + 'get': { + 'tags': ['nano_contracts'], + 'operationId': 'nano_contracts_history', + 'summary': 'Get history of a nano contract', + 'description': 'Returns the history of a nano contract.', + 'parameters': [ + { + 'name': 'id', + 'in': 'query', + 'description': 'ID of the nano contract to get the history from.', + 'required': True, + 'schema': { + 'type': 'string' + } + }, { + 'name': 'count', + 'in': 'query', + 'description': 'Maximum number of items to be returned. 
Default is 100.', + 'required': False, + 'schema': { + 'type': 'int', + } + }, { + 'name': 'after', + 'in': 'query', + 'description': 'Hash of transaction to offset the result after.', + 'required': False, + 'schema': { + 'type': 'string', + } + }, { + 'name': 'before', + 'in': 'query', + 'description': 'Hash of transaction to offset the result before.', + 'required': False, + 'schema': { + 'type': 'string', + } + } + ], + 'responses': { + '200': { + 'description': 'Success', + 'content': { + 'application/json': { + 'examples': { + 'success': { + 'summary': 'History of a nano contract', + 'value': { + 'success': True, + 'count': 100, + 'has_more': False, + 'history': [openapi_history_response], + } + }, + 'error': { + 'summary': 'Nano contract history index not initialized.', + 'value': { + 'success': False, + 'message': 'Nano contract history index not initialized.' + } + }, + } + } + } + } + } + } + } +} diff --git a/hathor/nanocontracts/resources/nc_creation.py b/hathor/nanocontracts/resources/nc_creation.py new file mode 100644 index 000000000..519936276 --- /dev/null +++ b/hathor/nanocontracts/resources/nc_creation.py @@ -0,0 +1,328 @@ +# Copyright 2021 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import annotations + +from pydantic import Field +from twisted.web.http import Request + +from hathor.api_util import Resource, set_cors +from hathor.cli.openapi_files.register import register_resource +from hathor.manager import HathorManager +from hathor.nanocontracts.resources.on_chain import SortOrder +from hathor.nanocontracts.types import BlueprintId, VertexId +from hathor.transaction.storage.exceptions import TransactionDoesNotExist +from hathor.util import bytes_from_hex, collect_n, not_none +from hathor.utils.api import ErrorResponse, QueryParams, Response + + +@register_resource +class NCCreationResource(Resource): + """Implements a GET API to return a list of NC creation txs.""" + isLeaf = True + + def __init__(self, manager: HathorManager) -> None: + super().__init__() + self.manager = manager + self.tx_storage = self.manager.tx_storage + assert self.tx_storage.indexes is not None + self.nc_creation_index = self.tx_storage.indexes.nc_creation + self.nc_history_index = self.tx_storage.indexes.nc_history + self.bp_history_index = self.tx_storage.indexes.blueprint_history + + def render_GET(self, request: Request) -> bytes: + request.setHeader(b'content-type', b'application/json; charset=utf-8') + set_cors(request, 'GET') + + if not self.nc_creation_index or not self.nc_history_index or not self.bp_history_index: + request.setResponseCode(503) + error_response = ErrorResponse(success=False, error='NC indexes not initialized, use --nc-indexes') + return error_response.json_dumpb() + + params = NCCreationParams.from_request(request) + if isinstance(params, ErrorResponse): + request.setResponseCode(400) + return params.json_dumpb() + + if params.after and params.before: + request.setResponseCode(400) + error_response = ErrorResponse(success=False, error='Parameters after and before can\'t be used together.') + return error_response.json_dumpb() + + vertex_id: VertexId | None = None + if params.search: + search = params.search.strip() + 
maybe_bytes = bytes_from_hex(search) + if maybe_bytes is None: + # in this case we do have `search` but it's not a valid hex, so we return empty. + response = NCCreationResponse( + nc_creation_txs=[], + before=params.before, + after=params.after, + count=params.count, + has_more=False, + ) + return response.json_dumpb() + + vertex_id = VertexId(maybe_bytes) + + # when using `search`, the value can be either a NC ID or a BP ID. + if nc_item := self._get_nc_creation_item(vertex_id): + # if we find the respective NC, it's a single match, and therefore any pagination + # returns an empty result. + nc_list = [nc_item] if not params.after and not params.before else [] + response = NCCreationResponse( + nc_creation_txs=nc_list, + before=params.before, + after=params.after, + count=params.count, + has_more=False, + ) + return response.json_dumpb() + # now vertex_id may be a BP, so it will be used below + + is_desc = params.order.is_desc() + + if not params.before and not params.after: + if vertex_id: + iter_nc_ids = ( + self.bp_history_index.get_newest(vertex_id) + if is_desc else self.bp_history_index.get_oldest(vertex_id) + ) + else: + iter_nc_ids = self.nc_creation_index.get_newest() if is_desc else self.nc_creation_index.get_oldest() + else: + ref_tx_id_hex = params.before or params.after + assert ref_tx_id_hex is not None + ref_tx_id = bytes_from_hex(ref_tx_id_hex) + if ref_tx_id is None: + request.setResponseCode(400) + error_response = ErrorResponse(success=False, error=f'Invalid "before" or "after": {ref_tx_id_hex}') + return error_response.json_dumpb() + + try: + ref_tx = self.tx_storage.get_transaction(ref_tx_id) + except TransactionDoesNotExist: + request.setResponseCode(404) + error_response = ErrorResponse(success=False, error=f'Transaction {ref_tx_id_hex} not found.') + return error_response.json_dumpb() + + if vertex_id: + if is_desc: + iter_getter = self.bp_history_index.get_newer if params.before else self.bp_history_index.get_older + else: + iter_getter = 
self.bp_history_index.get_older if params.before else self.bp_history_index.get_newer + iter_nc_ids = iter_getter(vertex_id, ref_tx) + next(iter_nc_ids) # these iterators include the ref_tx, so we skip it. + else: + if is_desc: + iter_getter2 = ( + self.nc_creation_index.get_newer if params.before else self.nc_creation_index.get_older + ) + else: + iter_getter2 = ( + self.nc_creation_index.get_older if params.before else self.nc_creation_index.get_newer + ) + iter_nc_ids = iter_getter2(tx_start=ref_tx) + + iter_ncs = map(self._get_nc_creation_item_strict, iter_nc_ids) + nc_txs, has_more = collect_n(iter_ncs, params.count) + response = NCCreationResponse( + nc_creation_txs=nc_txs, + before=params.before, + after=params.after, + count=params.count, + has_more=has_more, + ) + return response.json_dumpb() + + def _get_nc_creation_item(self, nc_id: bytes) -> NCCreationItem | None: + try: + tx = self.tx_storage.get_transaction(nc_id) + except TransactionDoesNotExist: + return None + + if not tx.is_nano_contract(): + return None + + from hathor.transaction import Transaction + if not isinstance(tx, Transaction): + return None + + nano_header = tx.get_nano_header() + if not nano_header.is_creating_a_new_contract(): + return None + + blueprint_id = BlueprintId(VertexId(nano_header.nc_id)) + blueprint_class = self.tx_storage.get_blueprint_class(blueprint_id) + + assert self.nc_history_index is not None + return NCCreationItem( + nano_contract_id=nc_id.hex(), + blueprint_id=blueprint_id.hex(), + blueprint_name=blueprint_class.__name__, + last_tx_timestamp=not_none(self.nc_history_index.get_last_tx_timestamp(nc_id)), + total_txs=self.nc_history_index.get_transaction_count(nc_id), + created_at=tx.timestamp, + ) + + def _get_nc_creation_item_strict(self, nc_id: bytes) -> NCCreationItem: + tx = self._get_nc_creation_item(nc_id) + assert tx is not None + return tx + + +class NCCreationParams(QueryParams): + before: str | None + after: str | None + count: int = Field(default=10, 
le=100) + search: str | None + order: SortOrder = SortOrder.DESC + + +class NCCreationItem(Response): + nano_contract_id: str + blueprint_id: str + blueprint_name: str + last_tx_timestamp: int + total_txs: int + created_at: int + + +class NCCreationResponse(Response): + success: bool = Field(default=True, const=True) + nc_creation_txs: list[NCCreationItem] + before: str | None + after: str | None + count: int + has_more: bool + + +NCCreationResource.openapi = { + '/nano_contract/creation': { + 'x-visibility': 'public', + 'x-rate-limit': { + 'global': [ + { + 'rate': '3r/s', + 'burst': 10, + 'delay': 3 + } + ], + 'per-ip': [ + { + 'rate': '1r/s', + 'burst': 4, + 'delay': 2 + } + ] + }, + 'get': { + 'tags': ['nano_contracts'], + 'operationId': 'nc-creations-txs', + 'summary': 'Get a list of Nano Contract creation transactions', + 'parameters': [ + { + 'name': 'before', + 'in': 'query', + 'description': 'Hash of transaction to offset the result before.', + 'required': False, + 'schema': { + 'type': 'string', + } + }, + { + 'name': 'after', + 'in': 'query', + 'description': 'Hash of transaction to offset the result after.', + 'required': False, + 'schema': { + 'type': 'string', + } + }, + { + 'name': 'count', + 'in': 'query', + 'description': 'Maximum number of items to be returned. 
Default is 10.', + 'required': False, + 'schema': { + 'type': 'int', + } + }, + { + 'name': 'search', + 'in': 'query', + 'description': 'Filter the list using the provided string, ' + 'that could be a Nano Contract ID or a Blueprint ID.', + 'required': False, + 'schema': { + 'type': 'string', + } + }, + { + 'name': 'order', + 'in': 'query', + 'description': 'Sort order, either "asc" or "desc".', + 'required': False, + 'schema': { + 'type': 'string', + } + } + ], + 'responses': { + '200': { + 'description': 'Success', + 'content': { + 'application/json': { + 'examples': { + 'success': { + 'summary': 'Success', + 'value': { + 'success': True, + 'after': None, + 'before': None, + 'count': 10, + 'has_more': False, + 'nc_creation_txs': [ + { + 'blueprint_id': '3cb032600bdf7db784800e4ea911b106' + '76fa2f67591f82bb62628c234e771595', + 'blueprint_name': 'BlueprintA', + 'created_at': 1737565681, + 'last_tx_timestamp': 1737565681, + 'nano_contract_id': '081c0e7586486d657353bc844b26dace' + 'aa93e54e2f0b65e9debf956e51a3805f', + 'total_txs': 1 + }, + { + 'blueprint_id': '15b9eb0547e0961259df84c400615a69' + 'fc204fe8d026b93337c33f0b9377a5bd', + 'blueprint_name': 'BlueprintB', + 'created_at': 1737565679, + 'last_tx_timestamp': 1737565679, + 'nano_contract_id': '773cd47af52e55fca04ce3aecab585c9' + '40b4661daf600956b3d60cff8fa186ed', + 'total_txs': 1 + } + ] + } + }, + } + } + } + } + } + } + } +} diff --git a/hathor/nanocontracts/resources/nc_exec_logs.py b/hathor/nanocontracts/resources/nc_exec_logs.py new file mode 100644 index 000000000..3a8dd0da3 --- /dev/null +++ b/hathor/nanocontracts/resources/nc_exec_logs.py @@ -0,0 +1,201 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Any + +from pydantic import Field +from twisted.web.http import Request + +from hathor.api_util import Resource, set_cors +from hathor.cli.openapi_files.register import register_resource +from hathor.manager import HathorManager +from hathor.nanocontracts.nc_exec_logs import NCLogLevel +from hathor.transaction import Transaction +from hathor.transaction.storage.exceptions import TransactionDoesNotExist +from hathor.utils.api import ErrorResponse, QueryParams + + +@register_resource +class NCExecLogsResource(Resource): + """Implements a web server GET API to get nano contract execution logs.""" + isLeaf = True + + def __init__(self, manager: HathorManager) -> None: + super().__init__() + self.manager = manager + self.nc_log_storage = manager.consensus_algorithm.block_algorithm_factory.nc_log_storage + + def render_GET(self, request: Request) -> bytes: + request.setHeader(b'content-type', b'application/json; charset=utf-8') + set_cors(request, 'GET') + + if self.nc_log_storage is None: + request.setResponseCode(503) + error_response = ErrorResponse(success=False, error='Nano contract exec logs not initialized') + return error_response.json_dumpb() + + params = NCExecLogsParams.from_request(request) + if isinstance(params, ErrorResponse): + request.setResponseCode(400) + return params.json_dumpb() + + try: + nc_id_bytes = bytes.fromhex(params.id) + except ValueError: + request.setResponseCode(400) + error_response = ErrorResponse(success=False, error=f'Invalid id: {params.id}') + return error_response.json_dumpb() + + try: + nc = 
self.manager.tx_storage.get_transaction(nc_id_bytes) + except TransactionDoesNotExist: + request.setResponseCode(404) + error_response = ErrorResponse(success=False, error=f'NC "{params.id}" not found.') + return error_response.json_dumpb() + + if not nc.is_nano_contract(): + request.setResponseCode(404) + error_response = ErrorResponse(success=False, error=f'NC "{params.id}" not found.') + return error_response.json_dumpb() + + log_level: NCLogLevel = NCLogLevel.DEBUG + if params.log_level is not None: + params_log_level = NCLogLevel.from_str(params.log_level) + if not params_log_level: + request.setResponseCode(400) + error_response = ErrorResponse(success=False, error=f'Invalid log level: {params.log_level}') + return error_response.json_dumpb() + log_level = params_log_level + + meta = nc.get_metadata() + logs = self.nc_log_storage.get_json_logs( + nc.hash, + log_level=log_level, + block_id=None if params.all_execs else meta.first_block, + ) + + if logs is None: + request.setResponseCode(404) + error_response = ErrorResponse(success=False, error='No logs were found.') + return error_response.json_dumpb() + + assert isinstance(nc, Transaction) + nano_header = nc.get_nano_header() + + response = NCExecLogsResponse( + logs=logs, + nc_id=nano_header.get_contract_id().hex(), + nc_execution=meta.nc_execution, + ) + return response.json_dumpb() + + +class NCExecLogsParams(QueryParams): + id: str + log_level: str | None = None + all_execs: bool = False + + +class NCExecLogsResponse(QueryParams): + success: bool = Field(const=True, default=True) + nc_id: str + nc_execution: str | None + logs: dict[str, Any] + + +NCExecLogsResource.openapi = { + '/nano_contract/logs': { + 'x-visibility': 'private', + 'get': { + 'operationId': 'nano_contracts_logs', + 'summary': 'Get execution logs of a nano contract', + 'description': 'Returns the execution logs of a nano contract per Block ID that executed it.', + 'parameters': [ + { + 'name': 'id', + 'in': 'query', + 'description': 'ID 
of the nano contract to get the logs from.', + 'required': True, + 'schema': { + 'type': 'string' + } + }, + { + 'name': 'log_level', + 'in': 'query', + 'description': 'Minimum log level to filter logs. One of DEBUG, INFO, WARN, ERROR. ' + 'Default is DEBUG, that is, no filter.', + 'required': False, + 'schema': { + 'type': 'string' + } + }, + { + 'name': 'all_execs', + 'in': 'query', + 'description': 'Whether to get all NC executions or just from the current block that executed the ' + 'NC, that is, the NC\'s first_block. Default is false.', + 'required': False, + 'schema': { + 'type': 'bool' + } + }, + ], + 'responses': { + '200': { + 'description': 'Success', + 'content': { + 'application/json': { + 'examples': { + 'success': { + 'summary': 'NC execution logs', + 'value': { + 'success': True, + 'logs': { + '25b90432c597f715e4ad4bd62436ae5f48dc988d47f051d8b3eb21ca008d6783': [ + { + 'error_traceback': None, + 'timestamp': 1739289130, + 'logs': [ + { + 'type': 'BEGIN', + 'level': 'DEBUG', + 'nc_id': '00001cc24fc57fce28da879c24d46d84' + '1c932c04bdadac28f0cd530c6c702dc9', + 'call_type': 'public', + 'method_name': 'initialize', + 'args': [], + 'kwargs': {}, + 'timestamp': 1739289133, + }, + { + 'type': 'LOG', + 'level': 'INFO', + 'message': 'initialize() called on MyBlueprint1', + 'key_values': {} + } + ], + } + ], + }, + } + } + } + } + } + } + } + } + } +} diff --git a/hathor/nanocontracts/resources/on_chain.py b/hathor/nanocontracts/resources/on_chain.py new file mode 100644 index 000000000..9f9a1c321 --- /dev/null +++ b/hathor/nanocontracts/resources/on_chain.py @@ -0,0 +1,281 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from enum import Enum + +from pydantic import Field +from twisted.web.http import Request + +from hathor.api_util import Resource, set_cors +from hathor.cli.openapi_files.register import register_resource +from hathor.manager import HathorManager +from hathor.nanocontracts.exception import ( + BlueprintDoesNotExist, + OCBBlueprintNotConfirmed, + OCBInvalidBlueprintVertexType, +) +from hathor.nanocontracts.types import blueprint_id_from_bytes +from hathor.util import bytes_from_hex +from hathor.utils.api import ErrorResponse, QueryParams, Response + + +@register_resource +class BlueprintOnChainResource(Resource): + """Implements a GET API to return a list of on-chain blueprints.""" + isLeaf = True + + def __init__(self, manager: HathorManager) -> None: + super().__init__() + self.manager = manager + + def render_GET(self, request: Request) -> bytes: + request.setHeader(b'content-type', b'application/json; charset=utf-8') + set_cors(request, 'GET') + + tx_storage = self.manager.tx_storage + assert tx_storage.indexes is not None + if tx_storage.indexes.blueprints is None: + request.setResponseCode(503) + error_response = ErrorResponse(success=False, error='Blueprint index not initialized') + return error_response.json_dumpb() + + bp_index = tx_storage.indexes.blueprints + + params = OnChainBlueprintsParams.from_request(request) + if isinstance(params, ErrorResponse): + request.setResponseCode(400) + return params.json_dumpb() + + if params.after and params.before: + request.setResponseCode(400) + error_response = ErrorResponse(success=False, 
error='Parameters after and before can\'t be used together.') + return error_response.json_dumpb() + + if params.search: + search = params.search.strip() + blueprint_list = [] + if bp_id := bytes_from_hex(search): + try: + bp_tx = tx_storage.get_on_chain_blueprint(blueprint_id_from_bytes(bp_id)) + except (BlueprintDoesNotExist, OCBInvalidBlueprintVertexType, OCBBlueprintNotConfirmed): + pass + else: + bp_class = bp_tx.get_blueprint_class() + bp_item = OnChainBlueprintItem( + id=search, + name=bp_class.__name__, + created_at=bp_tx.timestamp, + ) + blueprint_list = [bp_item] if not params.after and not params.before else [] + + response = OnChainBlueprintsResponse( + blueprints=blueprint_list, + before=params.before, + after=params.after, + count=params.count, + has_more=False, + ) + return response.json_dumpb() + + if not params.before and not params.after: + iter_bps = bp_index.get_newest() if params.order.is_desc() else bp_index.get_oldest() + else: + ref_tx_id_hex = params.before or params.after + assert ref_tx_id_hex is not None + ref_tx_id = bytes_from_hex(ref_tx_id_hex) + if ref_tx_id is None: + request.setResponseCode(400) + error_response = ErrorResponse(success=False, error=f'Invalid "before" or "after": {ref_tx_id_hex}') + return error_response.json_dumpb() + try: + ref_tx = tx_storage.get_on_chain_blueprint(blueprint_id_from_bytes(ref_tx_id)) + except (BlueprintDoesNotExist, OCBInvalidBlueprintVertexType, OCBBlueprintNotConfirmed) as e: + request.setResponseCode(404) + error_response = ErrorResponse( + success=False, error=f'Blueprint not found: {repr(e)}' + ) + return error_response.json_dumpb() + + if params.order.is_desc(): + iter_bps_getter = bp_index.get_newer if params.before else bp_index.get_older + else: + iter_bps_getter = bp_index.get_older if params.before else bp_index.get_newer + iter_bps = iter_bps_getter(tx_start=ref_tx) + + has_more = False + blueprints = [] + for idx, bp_id in enumerate(iter_bps): + try: + bp_tx = tx_storage.get_on_chain_blueprint(blueprint_id_from_bytes(bp_id)) + except (BlueprintDoesNotExist, OCBInvalidBlueprintVertexType): + raise AssertionError('bps iterator must always yield valid blueprint txs') + except OCBBlueprintNotConfirmed: + # unconfirmed
OCBs are simply not added to the response + continue + bp_class = bp_tx.get_blueprint_class() + bp_item = OnChainBlueprintItem( + id=bp_id.hex(), + name=bp_class.__name__, + created_at=bp_tx.timestamp, + ) + blueprints.append(bp_item) + if idx >= params.count - 1: + try: + next(iter_bps) + has_more = True + except StopIteration: + has_more = False + break + + response = OnChainBlueprintsResponse( + blueprints=blueprints, + before=params.before, + after=params.after, + count=params.count, + has_more=has_more, + ) + return response.json_dumpb() + + +class SortOrder(str, Enum): + ASC = 'asc' + DESC = 'desc' + + def is_desc(self) -> bool: + return self == SortOrder.DESC + + +class OnChainBlueprintsParams(QueryParams): + before: str | None + after: str | None + count: int = Field(default=10, le=100) + search: str | None = None + order: SortOrder = SortOrder.DESC + + +class OnChainBlueprintItem(Response): + id: str + name: str + created_at: int + + +class OnChainBlueprintsResponse(Response): + success: bool = Field(default=True, const=True) + blueprints: list[OnChainBlueprintItem] + before: str | None + after: str | None + count: int + has_more: bool + + +BlueprintOnChainResource.openapi = { + '/nano_contract/blueprint/on_chain': { + 'x-visibility': 'public', + 'x-rate-limit': { + 'global': [ + { + 'rate': '100r/s', + 'burst': 100, + 'delay': 100 + } + ], + 'per-ip': [ + { + 'rate': '3r/s', + 'burst': 10, + 'delay': 3 + } + ] + }, + 'get': { + 'operationId': 'on-chain-blueprints', + 'summary': 'Return a list of on-chain blueprints', + 'parameters': [ + { + 'name': 'before', + 'in': 'query', + 'description': 'Hash of transaction to offset the result before.', + 'required': False, + 'schema': { + 'type': 'string', + } + }, + { + 'name': 'after', + 'in': 'query', + 'description': 'Hash of transaction to offset the result after.', + 'required': False, + 'schema': { + 'type': 'string', + } + }, + { + 'name': 'count', + 'in': 'query', + 'description': 'Maximum number of items 
to be returned. Default is 10.', + 'required': False, + 'schema': { + 'type': 'int', + } + }, + { + 'name': 'search', + 'in': 'query', + 'description': 'Filter the list using the provided string, that can be a Blueprint ID.', + 'required': False, + 'schema': { + 'type': 'string', + } + }, + { + 'name': 'order', + 'in': 'query', + 'description': 'Sort order, either "asc" or "desc".', + 'required': False, + 'schema': { + 'type': 'string', + } + } + ], + 'responses': { + '200': { + 'description': 'Success', + 'content': { + 'application/json': { + 'examples': { + 'success': { + 'summary': 'Success', + 'value': { + 'success': True, + 'blueprints': [ + { + 'id': '0000035c5977ff42c40e6845f91d72af4feb06ce87ce9f50119b5d00e0906458', + 'name': 'BlueprintA', + 'created_at': 1736353724 + }, + { + 'id': '0000010881987e7fcce37cac7c1342f6f81b0a8e2f9c8ba6377a6272d433366e', + 'name': 'BlueprintB', + 'created_at': 1736351322 + } + ], + 'before': None, + 'after': None, + 'count': 2, + 'has_more': True + } + } + } + } + } + } + } + } + } +} diff --git a/hathor/nanocontracts/resources/state.py b/hathor/nanocontracts/resources/state.py new file mode 100644 index 000000000..d44362368 --- /dev/null +++ b/hathor/nanocontracts/resources/state.py @@ -0,0 +1,477 @@ +# Copyright 2021 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, Optional + +from pydantic import Field + +from hathor.api_util import Resource, set_cors +from hathor.cli.openapi_files.register import register_resource +from hathor.crypto.util import decode_address +from hathor.nanocontracts.api_arguments_parser import parse_nc_method_call +from hathor.nanocontracts.exception import NanoContractDoesNotExist +from hathor.nanocontracts.nc_types import make_nc_type_for_field_type +from hathor.nanocontracts.types import ContractId, VertexId +from hathor.utils.api import ErrorResponse, QueryParams, Response +from hathor.wallet.exceptions import InvalidAddress + +if TYPE_CHECKING: + from twisted.web.http import Request + + from hathor.manager import HathorManager + from hathor.nanocontracts.storage import NCContractStorage + from hathor.transaction import Block + + +@register_resource +class NanoContractStateResource(Resource): + """ Implements a web server GET API to get a nano contract state. + You must run with option `--status `. 
+ """ + isLeaf = True + + def __init__(self, manager: 'HathorManager') -> None: + super().__init__() + self.manager = manager + + def render_GET(self, request: 'Request') -> bytes: + request.setHeader(b'content-type', b'application/json; charset=utf-8') + set_cors(request, 'GET') + + params = NCStateParams.from_request(request) + if isinstance(params, ErrorResponse): + request.setResponseCode(400) + return params.json_dumpb() + + if sum(x is not None for x in (params.block_hash, params.block_height, params.timestamp)) > 1: + request.setResponseCode(400) + error_response = ErrorResponse( + success=False, + error='only one of `block_hash`, `block_height`, or `timestamp` must be used', + ) + return error_response.json_dumpb() + + try: + nc_id_bytes = ContractId(VertexId(bytes.fromhex(params.id))) + except ValueError: + request.setResponseCode(400) + error_response = ErrorResponse(success=False, error=f'Invalid id: {params.id}') + return error_response.json_dumpb() + + nc_storage: NCContractStorage + block: Block + block_hash: Optional[bytes] + try: + block_hash = bytes.fromhex(params.block_hash) if params.block_hash else None + except ValueError: + # This error will be raised in case the block_hash parameter is an invalid hex + request.setResponseCode(400) + error_response = ErrorResponse(success=False, error=f'Invalid block_hash parameter: {params.block_hash}') + return error_response.json_dumpb() + + if params.block_height is not None: + # Get hash of the block with the height + if self.manager.tx_storage.indexes is None: + # No indexes enabled in the storage + request.setResponseCode(503) + error_response = ErrorResponse( + success=False, + error='No indexes enabled in the storage, so we can\'t filter by block height.' 
+ ) + return error_response.json_dumpb() + + block_hash = self.manager.tx_storage.indexes.height.get(params.block_height) + if block_hash is None: + # No block hash was found with this height + request.setResponseCode(400) + error_response = ErrorResponse( + success=False, + error=f'No block hash was found with height {params.block_height}.' + ) + return error_response.json_dumpb() + elif params.timestamp is not None: + if self.manager.tx_storage.indexes is None: + # No indexes enabled in the storage + request.setResponseCode(503) + error_response = ErrorResponse( + success=False, + error='No indexes enabled in the storage, so we can\'t filter by timestamp.' + ) + return error_response.json_dumpb() + + block_hashes, has_more = self.manager.tx_storage.indexes.sorted_blocks.get_older( + timestamp=params.timestamp, + hash_bytes=None, + count=1, + ) + if not block_hashes: + # No block hash was found before this timestamp + request.setResponseCode(400) + error_response = ErrorResponse( + success=False, + error=f'No block hash was found before timestamp {params.timestamp}.' + ) + return error_response.json_dumpb() + assert len(block_hashes) == 1 + block_hash = block_hashes[0] + + if block_hash: + try: + block = self.manager.tx_storage.get_block(block_hash) + except AssertionError: + # This block hash is not from a block + request.setResponseCode(400) + error_response = ErrorResponse(success=False, error=f'Invalid block_hash {params.block_hash}.') + return error_response.json_dumpb() + else: + block = self.manager.tx_storage.get_best_block() + + try: + runner = self.manager.get_nc_runner(block) + nc_storage = runner.get_storage(nc_id_bytes) + except NanoContractDoesNotExist: + # Nano contract does not exist at this block + request.setResponseCode(404) + error_response = ErrorResponse( + success=False, + error=f'Nano contract does not exist at block {block.hash_hex}.' 
+ ) + return error_response.json_dumpb() + + blueprint_id = nc_storage.get_blueprint_id() + blueprint_class = self.manager.tx_storage.get_blueprint_class(blueprint_id) + + value: Any + # Get balances. + balances: dict[str, NCBalanceSuccessResponse | NCValueErrorResponse] = {} + for token_uid_hex in params.balances: + if token_uid_hex == '__all__': + # User wants to get the balance of all tokens in the nano contract + all_balances = nc_storage.get_all_balances() + for key_balance, balance in all_balances.items(): + balances[key_balance.token_uid.hex()] = NCBalanceSuccessResponse( + value=str(balance.value), + can_mint=balance.can_mint, + can_melt=balance.can_melt, + ) + break + + try: + token_uid = bytes.fromhex(token_uid_hex) + except ValueError: + balances[token_uid_hex] = NCValueErrorResponse(errmsg='invalid token id') + continue + + balance = nc_storage.get_balance(token_uid) + balances[token_uid_hex] = NCBalanceSuccessResponse( + value=str(balance.value), + can_mint=balance.can_mint, + can_melt=balance.can_melt, + ) + + # Get fields. + fields: dict[str, NCValueSuccessResponse | NCValueErrorResponse] = {} + param_fields: list[str] = params.fields + for field in param_fields: + key_field = self.get_key_for_field(field) + if key_field is None: + fields[field] = NCValueErrorResponse(errmsg='invalid format') + continue + + try: + field_type = blueprint_class.__annotations__[field] + except KeyError: + fields[field] = NCValueErrorResponse(errmsg='not a blueprint field') + continue + + try: + field_nc_type = make_nc_type_for_field_type(field_type) + value = nc_storage.get_obj(key_field.encode(), field_nc_type) + except KeyError: + fields[field] = NCValueErrorResponse(errmsg='field not found') + continue + + if type(value) is bytes: + value = value.hex() + fields[field] = NCValueSuccessResponse(value=value) + + # Call view methods. + runner.disable_call_trace() # call trace is not required for calling view methods. 
+ calls: dict[str, NCValueSuccessResponse | NCValueErrorResponse] = {} + for call_info in params.calls: + try: + method_name, method_args = parse_nc_method_call(blueprint_class, call_info) + value = runner.call_view_method(nc_id_bytes, method_name, *method_args) + if type(value) is bytes: + value = value.hex() + except Exception as e: + calls[call_info] = NCValueErrorResponse(errmsg=repr(e)) + else: + calls[call_info] = NCValueSuccessResponse(value=value) + + response = NCStateResponse( + success=True, + nc_id=params.id, + blueprint_id=blueprint_id.hex(), + blueprint_name=blueprint_class.__name__, + fields=fields, + balances=balances, + calls=calls, + ) + return response.json_dumpb() + + def get_key_for_field(self, field: str) -> Optional[str]: + """Return the storage key for a given field.""" + # Queries might have multiple parts separated by '.' + parts = field.split('.') + try: + key_parts = [self.parse_field_name(name) for name in parts] + except ValueError: + return None + return ':'.join(key_parts) + + def parse_field_name(self, field: str) -> str: + """Parse field names.""" + if field.startswith("a'") and field.endswith("'"): + # Addresses are decoded to bytes + address = field[2:-1] + try: + return str(decode_address(address)) + except InvalidAddress as e: + raise ValueError from e + elif field.startswith("b'") and field.endswith("'"): + # This field is bytes and we receive this in hexa + hexa = field[2:-1] + # This will raise ValueError in case it's an invalid hexa + # and this will be handled in the get_key_for_field method + return str(bytes.fromhex(hexa)) + return field + + +class NCStateParams(QueryParams): + id: str + fields: list[str] = Field(alias='fields[]', default_factory=list) + balances: list[str] = Field(alias='balances[]', default_factory=list) + calls: list[str] = Field(alias='calls[]', default_factory=list) + block_hash: Optional[str] + block_height: Optional[int] + timestamp: Optional[int] + + +class NCValueSuccessResponse(Response): + 
value: Any + + +class NCBalanceSuccessResponse(Response): + value: str + can_mint: bool + can_melt: bool + + +class NCValueErrorResponse(Response): + errmsg: str + + +class NCStateResponse(Response): + success: bool + nc_id: str + blueprint_id: str + blueprint_name: str + fields: dict[str, NCValueSuccessResponse | NCValueErrorResponse] + balances: dict[str, NCBalanceSuccessResponse | NCValueErrorResponse] + calls: dict[str, NCValueSuccessResponse | NCValueErrorResponse] + + +_openapi_success_value = { + 'success': True, + 'nc_id': '00007f246f6d645ef3174f2eddf53f4b6bd41e8be0c0b7fbea9827cf53e12d9e', + 'blueprint_id': '3cb032600bdf7db784800e4ea911b10676fa2f67591f82bb62628c234e771595', + 'blueprint_name': 'Bet', + 'fields': { + 'token_uid': {'value': '00'}, + 'total': {'value': 300}, + 'final_result': {'value': '1x0'}, + 'oracle_script': {'value': '76a91441c431ff7ad5d6ce5565991e3dcd5d9106cfd1e288ac'}, + 'withdrawals.a\'Wi8zvxdXHjaUVAoCJf52t3WovTZYcU9aX6\'': {'value': 300}, + 'address_details.a\'Wi8zvxdXHjaUVAoCJf52t3WovTZYcU9aX6\'': {'value': {'1x0': 100}}, + } +} + + +NanoContractStateResource.openapi = { + '/nano_contract/state': { + 'x-visibility': 'public', + 'x-rate-limit': { + 'global': [ + { + 'rate': '30r/s', + 'burst': 20, + 'delay': 10 + } + ], + 'per-ip': [ + { + 'rate': '5r/s', + 'burst': 6, + 'delay': 3 + } + ] + }, + 'get': { + 'tags': ['nano_contracts'], + 'operationId': 'nano_contracts_state', + 'summary': 'Get state of a nano contract', + 'description': 'Returns the state requested of a nano contract.', + 'parameters': [ + { + 'name': 'id', + 'in': 'query', + 'description': 'ID of the nano contract to get the state from', + 'required': True, + 'schema': { + 'type': 'string' + } + }, + { + 'name': 'balances[]', + 'in': 'query', + 'description': 'List of token ids in hex to get the contract balance. 
' + 'If you want to get the balance for all tokens in the contract, just use __all__.', + 'required': False, + 'schema': { + 'type': 'array', + 'items': { + 'type': 'string' + } + }, + 'examples': { + 'balances': { + 'summary': 'Example of balances', + 'value': ['00', '000008f2ee2059a189322ae7cb1d7e7773dcb4fdc8c4de8767f63022b3731845'] + }, + } + }, + { + 'name': 'calls[]', + 'in': 'query', + 'description': 'List of private method calls to be executed. ' + 'The format must be "method_name(arg1, arg2, arg3, ...)". ' + 'Bytes arguments must be sent in hex, address arguments in bytes ' + 'must be sent as hex itself, or in base58 with the address tag, e.g. ' + 'a\'Wi8zvxdXHjaUVAoCJf52t3WovTZYcU9aX6\', and tuple arguments must be ' + 'sent as an array, e.g., (a, b, c) must be sent as [a, b, c]. ' + 'For SignedData field we expect a list with two elements, where the ' + 'first one is the data to be signed and the second is the signature in hex.', + 'required': False, + 'schema': { + 'type': 'array', + 'items': { + 'type': 'string' + } + }, + 'examples': { + 'calls': { + 'summary': 'Example of calls', + 'value': ['view_method_1(arg1, arg2)', 'view_method_2()'] + }, + } + }, + { + 'name': 'fields[]', + 'in': 'query', + 'description': 'Fields to get the data from the nano contract state', + 'required': False, + 'schema': { + 'type': 'array', + 'items': { + 'type': 'string' + } + }, + 'examples': { + 'simple fields': { + 'summary': 'Only direct fields', + 'value': ['token_uid', 'total', 'final_result', 'oracle_script'] + }, + 'With dict fields': { + 'summary': ('Simple and dict fields (dict fields where the keys are addresses). 
' + 'For an address you must encapsulate the b58 with a\'\''), + 'value': [ + 'token_uid', + 'total', + 'final_result', + 'oracle_script', + 'withdrawals.a\'Wi8zvxdXHjaUVAoCJf52t3WovTZYcU9aX6\'', + 'address_details.a\'Wi8zvxdXHjaUVAoCJf52t3WovTZYcU9aX6\'' + ] + }, + } + }, + { + 'name': 'block_height', + 'in': 'query', + 'description': 'Height of the block to get the nano contract state from.' + 'Can\'t be used together with block_hash or timestamp parameter.', + 'required': False, + 'schema': { + 'type': 'int' + } + }, + { + 'name': 'block_hash', + 'in': 'query', + 'description': 'Hash of the block to get the nano contract state from.' + 'Can\'t be used together with block_height or timestamp parameter.', + 'required': False, + 'schema': { + 'type': 'string' + } + }, + { + 'name': 'timestamp', + 'in': 'query', + 'description': 'Timestamp to get the nano contract state from.' + 'Can\'t be used together with block_hash or block_height parameter.', + 'required': False, + 'schema': { + 'type': 'int' + } + }, + ], + 'responses': { + '200': { + 'description': 'Success', + 'content': { + 'application/json': { + 'examples': { + 'success': { + 'summary': 'Success to get state from nano', + 'value': _openapi_success_value, + }, + 'error': { + 'summary': 'Invalid nano contract ID', + 'value': { + 'success': False, + 'message': 'Invalid nano contract ID.' + } + }, + } + } + } + } + } + } + } +} diff --git a/hathor/nanocontracts/rng.py b/hathor/nanocontracts/rng.py new file mode 100644 index 000000000..f0401cc82 --- /dev/null +++ b/hathor/nanocontracts/rng.py @@ -0,0 +1,120 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +from typing import Any, Sequence, TypeVar + +from cryptography.hazmat.primitives.ciphers import Cipher, CipherContext, algorithms + +from hathor.difficulty import Hash + +T = TypeVar('T') + + +class NoMethodOverrideMeta(type): + __slots__ = () + + def __setattr__(cls, name: str, value: Any) -> None: + raise AttributeError(f'Cannot override method `{name}`') + + +class NanoRNG(metaclass=NoMethodOverrideMeta): + """Implement a deterministic random number generator that will be used by the sorter. + + This implementation uses the ChaCha20 encryption as RNG. + """ + + __slots__ = ('__seed', '__encryptor') + + def __init__(self, seed: bytes) -> None: + self.__seed: Hash + object.__setattr__(self, '_NanoRNG__seed', Hash(seed)) + + key = self.__seed + nonce = self.__seed[:16] + + algorithm = algorithms.ChaCha20(key, nonce) + cipher = Cipher(algorithm, mode=None) + + self.__encryptor: CipherContext + object.__setattr__(self, '_NanoRNG__encryptor', cipher.encryptor()) + + @classmethod + def create_with_shell(cls, seed: bytes) -> NanoRNG: + """Create a NanoRNG instance wrapped in a lightweight shell subclass. + + This method dynamically creates a subclass of NanoRNG (a "shell" class) and instantiates it. The shell class is + useful to prevent sharing classes and objects among different contracts. 
+ """ + class ShellNanoRNG(NanoRNG): + __slots__ = () + + return ShellNanoRNG(seed=seed) + + def __setattr__(self, name: str, value: Any) -> None: + raise AttributeError("Cannot assign methods to this object.") + + @property + def seed(self) -> Hash: + """Return the seed used to create the RNG.""" + return self.__seed + + def randbytes(self, size: int) -> bytes: + """Return a random string of bytes.""" + assert size >= 1 + ciphertext = self.__encryptor.update(b'\0' * size) + assert len(ciphertext) == size + return ciphertext + + def randbits(self, bits: int) -> int: + """Return a random integer in the range [0, 2**bits).""" + assert bits >= 1 + size = (bits + 7) // 8 + ciphertext = self.randbytes(size) + x = int.from_bytes(ciphertext, byteorder='little', signed=False) + return x % (2**bits) + + def randbelow(self, n: int) -> int: + """Return a random integer in the range [0, n).""" + assert n >= 1 + k = n.bit_length() + r = self.randbits(k) # 0 <= r < 2**k + while r >= n: + r = self.randbits(k) + return r + + def randrange(self, start: int, stop: int, step: int = 1) -> int: + """Return a random integer in the range [start, stop) with a given step. + + Roughly equivalent to `choice(range(start, stop, step))` but supports arbitrarily large ranges.""" + assert stop > start + assert step >= 1 + qty = (stop - start + step - 1) // step + k = self.randbelow(qty) + return start + k * step + + def randint(self, a: int, b: int) -> int: + """Return a random integer in the range [a, b].""" + assert b >= a + return a + self.randbelow(b - a + 1) + + def choice(self, seq: Sequence[T]) -> T: + """Choose a random element from a non-empty sequence.""" + return seq[self.randbelow(len(seq))] + + def random(self) -> float: + """Return a random float in the range [0, 1).""" + # 2**53 is the maximum integer float can represent without loss of precision. 
+ return self.randbits(53) / 2**53 diff --git a/hathor/nanocontracts/runner/__init__.py b/hathor/nanocontracts/runner/__init__.py new file mode 100644 index 000000000..8cd21f2ed --- /dev/null +++ b/hathor/nanocontracts/runner/__init__.py @@ -0,0 +1,23 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from hathor.nanocontracts.runner.runner import Runner +from hathor.nanocontracts.runner.types import CallInfo, CallRecord, CallType + +__all__ = [ + 'CallType', + 'CallRecord', + 'CallInfo', + 'Runner', +] diff --git a/hathor/nanocontracts/runner/runner.py b/hathor/nanocontracts/runner/runner.py new file mode 100644 index 000000000..fe7e6831c --- /dev/null +++ b/hathor/nanocontracts/runner/runner.py @@ -0,0 +1,1086 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import annotations + +from collections import defaultdict +from typing import Any, Callable, Concatenate, ParamSpec, Sequence, TypeVar + +from typing_extensions import assert_never + +from hathor.conf.settings import HATHOR_TOKEN_UID, HathorSettings +from hathor.nanocontracts.balance_rules import BalanceRules +from hathor.nanocontracts.blueprint import Blueprint +from hathor.nanocontracts.blueprint_env import BlueprintEnvironment +from hathor.nanocontracts.context import Context +from hathor.nanocontracts.exception import ( + NCAlreadyInitializedContractError, + NCFail, + NCForbiddenAction, + NCInvalidContext, + NCInvalidContractId, + NCInvalidInitializeMethodCall, + NCInvalidMethodCall, + NCInvalidPublicMethodCallFromView, + NCInvalidSyscall, + NCMethodNotFound, + NCUninitializedContractError, + NCViewMethodError, +) +from hathor.nanocontracts.metered_exec import MeteredExecutor +from hathor.nanocontracts.method import Method, ReturnOnly +from hathor.nanocontracts.rng import NanoRNG +from hathor.nanocontracts.runner.types import ( + CallInfo, + CallRecord, + CallType, + IndexUpdateRecordType, + SyscallCreateContractRecord, + SyscallUpdateTokensRecord, + UpdateAuthoritiesRecord, + UpdateAuthoritiesRecordType, +) +from hathor.nanocontracts.storage import NCBlockStorage, NCChangesTracker, NCContractStorage, NCStorageFactory +from hathor.nanocontracts.storage.contract_storage import Balance +from hathor.nanocontracts.types import ( + NC_ALLOWED_ACTIONS_ATTR, + NC_FALLBACK_METHOD, + NC_INITIALIZE_METHOD, + Address, + BaseTokenAction, + BlueprintId, + ContractId, + NCAcquireAuthorityAction, + NCAction, + NCActionType, + NCArgs, + NCDepositAction, + NCGrantAuthorityAction, + NCParsedArgs, + NCRawArgs, + NCWithdrawalAction, + TokenUid, + VertexId, +) +from hathor.nanocontracts.utils import ( + derive_child_contract_id, + derive_child_token_id, + is_nc_fallback_method, + is_nc_public_method, + is_nc_view_method, +) +from hathor.reactor import 
ReactorProtocol +from hathor.transaction import Transaction +from hathor.transaction.exceptions import TransactionDataError +from hathor.transaction.storage import TransactionStorage +from hathor.transaction.util import ( + clean_token_string, + get_deposit_amount, + get_withdraw_amount, + validate_token_name_and_symbol, +) + +P = ParamSpec('P') +T = TypeVar('T') + +MAX_SEQNUM_JUMP_SIZE: int = 10 + + +def _forbid_syscall_from_view( + display_name: str, +) -> Callable[[Callable[Concatenate['Runner', P], T]], Callable[Concatenate['Runner', P], T]]: + """Mark a syscall method as forbidden to be called from @view methods.""" + def decorator(fn: Callable[Concatenate['Runner', P], T]) -> Callable[Concatenate['Runner', P], T]: + def wrapper(self: Runner, /, *args: P.args, **kwargs: P.kwargs) -> T: + current_call_record = self.get_current_call_record() + if current_call_record.type is CallType.VIEW: + raise NCViewMethodError(f'@view method cannot call `syscall.{display_name}`') + return fn(self, *args, **kwargs) + return wrapper + return decorator + + +class Runner: + """Runner with support for call between contracts. + """ + MAX_RECURSION_DEPTH: int = 100 + MAX_CALL_COUNTER: int = 250 + + def __init__( + self, + *, + reactor: ReactorProtocol, + settings: HathorSettings, + tx_storage: TransactionStorage, + storage_factory: NCStorageFactory, + block_storage: NCBlockStorage, + seed: bytes | None, + ) -> None: + self.tx_storage = tx_storage + self.storage_factory = storage_factory + self.block_storage = block_storage + self._storages: dict[ContractId, NCContractStorage] = {} + self._settings = settings + self.reactor = reactor + + # For tracking fuel and memory usage + self._initial_fuel = self._settings.NC_INITIAL_FUEL_TO_CALL_METHOD + self._memory_limit = self._settings.NC_MEMORY_LIMIT_TO_CALL_METHOD + self._metered_executor: MeteredExecutor | None = None + + # Flag indicating to keep record of all calls. 
+ self._enable_call_trace = True + + # Information about the last call. + self._last_call_info: CallInfo | None = None + + # Information about the current call. + self._call_info: CallInfo | None = None + + self._rng: NanoRNG | None = NanoRNG(seed) if seed is not None else None + self._rng_per_contract: dict[ContractId, NanoRNG] = {} + + # Information about updated tokens in the current call via syscalls. + self._updated_tokens_totals: defaultdict[TokenUid, int] = defaultdict(int) + + def execute_from_tx(self, tx: Transaction) -> None: + """Execute the contract's method call.""" + # Check seqnum. + nano_header = tx.get_nano_header() + + if nano_header.is_creating_a_new_contract(): + contract_id = ContractId(VertexId(tx.hash)) + else: + contract_id = ContractId(VertexId(nano_header.nc_id)) + + assert nano_header.nc_seqnum >= 0 + current_seqnum = self.block_storage.get_address_seqnum(Address(nano_header.nc_address)) + diff = nano_header.nc_seqnum - current_seqnum + if diff <= 0 or diff > MAX_SEQNUM_JUMP_SIZE: + # Fail execution if seqnum is invalid. + self._last_call_info = self._build_call_info(contract_id) + # TODO: Set the seqnum in this case? + raise NCFail(f'invalid seqnum (diff={diff})') + self.block_storage.set_address_seqnum(Address(nano_header.nc_address), nano_header.nc_seqnum) + + vertex_metadata = tx.get_metadata() + assert vertex_metadata.first_block is not None, 'execute must only be called after first_block is updated' + + context = nano_header.get_context() + assert context.vertex.block.hash == vertex_metadata.first_block + + nc_args = NCRawArgs(nano_header.nc_args_bytes) + if nano_header.is_creating_a_new_contract(): + blueprint_id = BlueprintId(VertexId(nano_header.nc_id)) + self.create_contract_with_nc_args(contract_id, blueprint_id, context, nc_args) + else: + self.call_public_method_with_nc_args(contract_id, nano_header.nc_method, context, nc_args) + + def disable_call_trace(self) -> None: + """Disable call trace. 
Useful when the runner is only used to call view methods, for example in APIs.""" + self._enable_call_trace = False + + def get_last_call_info(self) -> CallInfo: + """Get last call information.""" + assert self._last_call_info is not None + return self._last_call_info + + def has_contract_been_initialized(self, contract_id: ContractId) -> bool: + """Check whether a contract has been initialized or not.""" + if contract_id in self._storages: + return True + return self.block_storage.has_contract(contract_id) + + def get_storage(self, contract_id: ContractId) -> NCContractStorage: + """Return the storage for a contract. + + If no storage has been created, then one will be created.""" + storage = self._storages.get(contract_id) + if storage is None: + storage = self.block_storage.get_contract_storage(contract_id) + storage.lock() + self._storages[contract_id] = storage + return storage + + def _create_changes_tracker(self, contract_id: ContractId) -> NCChangesTracker: + """Return the latest change tracker for a contract.""" + nc_storage = self.get_current_changes_tracker_or_storage(contract_id) + change_tracker = NCChangesTracker(contract_id, nc_storage) + return change_tracker + + def get_blueprint_id(self, contract_id: ContractId) -> BlueprintId: + """Return the blueprint id of a contract.""" + nc_storage = self.get_current_changes_tracker_or_storage(contract_id) + return nc_storage.get_blueprint_id() + + def _build_call_info(self, contract_id: ContractId) -> CallInfo: + from hathor.nanocontracts.nc_exec_logs import NCLogger + return CallInfo( + MAX_RECURSION_DEPTH=self.MAX_RECURSION_DEPTH, + MAX_CALL_COUNTER=self.MAX_CALL_COUNTER, + enable_call_trace=self._enable_call_trace, + nc_logger=NCLogger(__reactor__=self.reactor, __nc_id__=contract_id), + ) + + def call_public_method( + self, + contract_id: ContractId, + method_name: str, + ctx: Context, + *args: Any, + **kwargs: Any, + ) -> Any: + """Call a contract public method.""" + nc_args = NCParsedArgs(args, kwargs) 
+ return self.call_public_method_with_nc_args(contract_id, method_name, ctx, nc_args) + + def call_public_method_with_nc_args( + self, + contract_id: ContractId, + method_name: str, + ctx: Context, + nc_args: NCArgs, + ) -> Any: + """Call a contract public method with pre-constructed NCArgs.""" + if method_name == NC_INITIALIZE_METHOD: + raise NCInvalidInitializeMethodCall( + 'Cannot call initialize from call_public_method(); use create_contract() instead.' + ) + try: + ret = self._unsafe_call_public_method(contract_id, method_name, ctx, nc_args) + finally: + self._reset_all_change_trackers() + return ret + + def _unsafe_call_public_method( + self, + contract_id: ContractId, + method_name: str, + ctx: Context, + nc_args: NCArgs, + ) -> Any: + """Invoke a public method without running the usual guard‑safety checks. + + Used by call_public_method() and create_contract().""" + + assert self._call_info is None + self._call_info = self._build_call_info(contract_id) + + if not self.has_contract_been_initialized(contract_id): + raise NCUninitializedContractError('cannot call methods from uninitialized contracts') + + self._metered_executor = MeteredExecutor(fuel=self._initial_fuel, memory_limit=self._memory_limit) + + blueprint_id = self.get_blueprint_id(contract_id) + + ret = self._execute_public_method_call( + contract_id=contract_id, + blueprint_id=blueprint_id, + method_name=method_name, + ctx=ctx, + nc_args=nc_args, + ) + + self._validate_balances(ctx) + self._commit_all_changes_to_storage() + + # Reset the tokens counters so this Runner can be reused (in blueprint tests, for example). + self._updated_tokens_totals = defaultdict(int) + return ret + + @_forbid_syscall_from_view('call_public_method') + def syscall_call_another_contract_public_method( + self, + contract_id: ContractId, + method_name: str, + actions: Sequence[NCAction], + args: tuple[Any, ...], + kwargs: dict[str, Any], + ) -> Any: + """Call another contract's public method. 
This method must be called by a blueprint during an execution.""" + if method_name == NC_INITIALIZE_METHOD: + raise NCInvalidInitializeMethodCall('cannot call initialize from another contract') + + if self.get_current_contract_id() == contract_id: + raise NCInvalidContractId('a contract cannot call itself') + + if not self.has_contract_been_initialized(contract_id): + raise NCUninitializedContractError('cannot call a method from an uninitialized contract') + + blueprint_id = self.get_blueprint_id(contract_id) + nc_args = NCParsedArgs(args, kwargs) + return self._unsafe_call_another_contract_public_method( + contract_id=contract_id, + blueprint_id=blueprint_id, + method_name=method_name, + actions=actions, + nc_args=nc_args, + ) + + @_forbid_syscall_from_view('proxy_call_public_method') + def syscall_proxy_call_public_method( + self, + blueprint_id: BlueprintId, + method_name: str, + actions: Sequence[NCAction], + args: tuple[Any, ...], + kwargs: dict[str, Any], + ) -> Any: + """Execute a proxy call to another blueprint's public method (similar to a DELEGATECALL). + This method must be called by a blueprint during an execution. 
+ + When using delegatecall: + - The code from the target blueprint runs as if it were part of the calling contract + - For all purposes, it is a call to the calling contract + - The storage context remains that of the calling contract + """ + nc_args = NCParsedArgs(args, kwargs) + return self.syscall_proxy_call_public_method_nc_args(blueprint_id, method_name, actions, nc_args) + + @_forbid_syscall_from_view('proxy_call_public_method_nc_args') + def syscall_proxy_call_public_method_nc_args( + self, + blueprint_id: BlueprintId, + method_name: str, + actions: Sequence[NCAction], + nc_args: NCArgs, + ) -> Any: + if method_name == NC_INITIALIZE_METHOD: + raise NCInvalidInitializeMethodCall('cannot call initialize from another contract') + + contract_id = self.get_current_contract_id() + + if blueprint_id == self.get_blueprint_id(contract_id): + raise NCInvalidSyscall('cannot call the same blueprint') + + return self._unsafe_call_another_contract_public_method( + contract_id=contract_id, + blueprint_id=blueprint_id, + method_name=method_name, + actions=actions, + nc_args=nc_args, + ) + + def _unsafe_call_another_contract_public_method( + self, + contract_id: ContractId, + blueprint_id: BlueprintId, + method_name: str, + actions: Sequence[NCAction], + nc_args: NCArgs, + ) -> Any: + """Invoke another contract's public method without running the usual guard‑safety checks. + + Used by call_another_contract_public_method() and create_another_contract().""" + assert self._call_info is not None + + last_call_record = self.get_current_call_record() + + if last_call_record.type is CallType.VIEW: + raise NCInvalidPublicMethodCallFromView('cannot call a public method from a view method') + + # Validate actions. + for action in actions: + if isinstance(action, BaseTokenAction) and action.amount < 0: + raise NCInvalidContext('amount must be positive') + + first_ctx = self._call_info.stack[0].ctx + assert first_ctx is not None + + # Execute the actions on the caller side. 
The callee side is executed by the `_execute_public_method_call()` + # call below, if it succeeds. + previous_changes_tracker = last_call_record.changes_tracker + for action in actions: + rules = BalanceRules.get_rules(self._settings, action) + rules.nc_caller_execution_rule(previous_changes_tracker) + + # Call the other contract method. + ctx = Context( + actions=actions, + vertex=first_ctx.vertex, + address=last_call_record.contract_id, + timestamp=first_ctx.timestamp, + ) + return self._execute_public_method_call( + contract_id=contract_id, + blueprint_id=blueprint_id, + method_name=method_name, + ctx=ctx, + nc_args=nc_args, + ) + + def _reset_all_change_trackers(self) -> None: + """Reset all changes and prepare for next call.""" + assert self._call_info is not None + for change_trackers in self._call_info.change_trackers.values(): + for change_tracker in change_trackers: + if not change_tracker.has_been_commited: + change_tracker.block() + self._last_call_info = self._call_info + self._call_info = None + + def _validate_balances(self, ctx: Context) -> None: + """ + Validate that all balances are non-negative and assert that + the total diffs match the actions from the main call. + """ + assert self._call_info is not None + assert self._call_info.calls is not None + + # total_diffs accumulates the balance differences for all contracts called during this execution. + total_diffs: defaultdict[TokenUid, int] = defaultdict(int) + + # Each list of change trackers account for a single call in a contract. + for change_trackers in self._call_info.change_trackers.values(): + assert len(change_trackers) == 1, 'after execution, each contract must have exactly one change tracker' + change_tracker = change_trackers[0] + change_tracker.validate_balances_are_positive() + + # Update total_diffs according to the diffs caused by each call, for each token. 
+ for balance_key, balance in change_tracker.get_balance_diff().items(): + total_diffs[TokenUid(balance_key.token_uid)] += balance + + # Accumulate tokens totals from syscalls to compare with the totals from this runner. + calculated_tokens_totals: defaultdict[TokenUid, int] = defaultdict(int) + for call in self._call_info.calls: + if call.index_updates is None: + assert call.type is CallType.VIEW + continue + for record in call.index_updates: + match record: + case SyscallCreateContractRecord() | UpdateAuthoritiesRecord(): + # Nothing to do here. + pass + case SyscallUpdateTokensRecord(): + calculated_tokens_totals[record.token_uid] += record.token_amount + calculated_tokens_totals[TokenUid(HATHOR_TOKEN_UID)] += record.htr_amount + case _: + assert_never(record) + + assert calculated_tokens_totals == self._updated_tokens_totals, ( + f'conflicting updated tokens totals: {calculated_tokens_totals, self._updated_tokens_totals}' + ) + + # Update total_diffs according to syscalls caused by each call. + for token_uid, amount in self._updated_tokens_totals.items(): + total_diffs[token_uid] -= amount + + # Now we do the inverse, accounting for all actions in the main call. + for action in ctx.__all_actions__: + match action: + case NCDepositAction(): + total_diffs[action.token_uid] -= action.amount + + case NCWithdrawalAction(): + total_diffs[action.token_uid] += action.amount + + case NCGrantAuthorityAction() | NCAcquireAuthorityAction(): + # These actions don't affect the tx balance, + # so no need to account for them. 
+ pass + + case _: + assert_never(action) + + assert all(diff == 0 for diff in total_diffs.values()), ( + f'change tracker diffs do not match actions: {total_diffs}' + ) + + def _commit_all_changes_to_storage(self) -> None: + """Commit all change trackers.""" + assert self._call_info is not None + for nc_id, change_trackers in self._call_info.change_trackers.items(): + assert len(change_trackers) == 1 + change_tracker = change_trackers[0] + + nc_storage = self._storages[nc_id] + assert change_tracker.storage == nc_storage + nc_storage.unlock() + change_tracker.commit() + nc_storage.lock() + self.block_storage.update_contract_trie(nc_id, nc_storage.get_root_id()) + + def commit(self) -> None: + """Commit all storages and update block trie.""" + for nc_id, nc_storage in self._storages.items(): + nc_storage.unlock() + nc_storage.commit() + nc_storage.lock() + + def _execute_public_method_call( + self, + *, + contract_id: ContractId, + blueprint_id: BlueprintId, + method_name: str, + ctx: Context, + nc_args: NCArgs, + ) -> Any: + """An internal method that actually execute the public method call. + It is also used when a contract calls another contract. + """ + assert self._metered_executor is not None + assert self._call_info is not None + + self._validate_context(ctx) + changes_tracker = self._create_changes_tracker(contract_id) + blueprint = self._create_blueprint_instance(blueprint_id, changes_tracker) + method = getattr(blueprint, method_name, None) + + called_method_name: str = method_name + parser: Method | ReturnOnly + args: tuple[Any, ...] 
+ if method is None: + assert method_name != NC_INITIALIZE_METHOD + fallback_method = getattr(blueprint, NC_FALLBACK_METHOD, None) + if fallback_method is None: + raise NCMethodNotFound(f'method `{method_name}` not found and no fallback is provided') + method = fallback_method + assert is_nc_fallback_method(method) + parser = ReturnOnly.from_callable(method) + called_method_name = NC_FALLBACK_METHOD + args = method_name, nc_args + else: + if not is_nc_public_method(method): + raise NCInvalidMethodCall(f'method `{method_name}` is not a public method') + parser = Method.from_callable(method) + args = self._validate_nc_args_for_method(parser, nc_args) + + call_record = CallRecord( + type=CallType.PUBLIC, + depth=self._call_info.depth, + contract_id=contract_id, + blueprint_id=blueprint_id, + method_name=called_method_name, + ctx=ctx, + args=args, + changes_tracker=changes_tracker, + index_updates=[], + ) + self._call_info.pre_call(call_record) + + self._validate_actions(method, called_method_name, ctx) + for action in ctx.__all_actions__: + rules = BalanceRules.get_rules(self._settings, action) + rules.nc_callee_execution_rule(changes_tracker) + self._handle_index_update(action) + + try: + # Although the context is immutable, we're passing a copy to the blueprint method as an added precaution. + # This ensures that, even if the blueprint method attempts to exploit or alter the context, it cannot + # impact the original context. Since the runner relies on the context for other critical checks, any + # unauthorized modification would pose a serious security risk. + ret = self._metered_executor.call(method, args=(ctx.copy(), *args)) + except NCFail: + raise + except Exception as e: + # Convert any other exception to NCFail. 
+ raise NCFail from e + + if len(self._call_info.change_trackers[contract_id]) > 1: + call_record.changes_tracker.commit() + + self._call_info.post_call(call_record) + return self._validate_return_type_for_method(parser, ret) + + @staticmethod + def _validate_nc_args_for_method(method: Method, nc_args: NCArgs) -> tuple[Any, ...]: + """ + Given a method and its NCArgs, return the merged args and kwargs, + while validating their types and cloning the objects. + """ + args_bytes: bytes + match nc_args: + case NCParsedArgs(): + # Even though we could simply validate the type with `check_value/isinstance` and return the args, + # we do a round-trip to create a new instance and secure mutation of objects across contracts. + args_bytes = method.serialize_args_bytes(nc_args.args, nc_args.kwargs) + case NCRawArgs(args_bytes): + # Nothing to do, we can just deserialize the bytes directly. + pass + case _: + assert_never(nc_args) + + return method.deserialize_args_bytes(args_bytes) + + @staticmethod + def _validate_return_type_for_method(method: Method | ReturnOnly, return_value: Any) -> Any: + """ + Given a method and its return value, return that value, while validating its type and cloning the object. + """ + # Even though we could simply validate the type with `check_value/isinstance` and return the value, + # we do a round-trip to create a new instance and secure mutation of objects across contracts. 
+ return_bytes = method.serialize_return_bytes(return_value) + return method.deserialize_return_bytes(return_bytes) + + def call_view_method(self, contract_id: ContractId, method_name: str, *args: Any, **kwargs: Any) -> Any: + """Call a contract view method.""" + assert self._call_info is None + self._call_info = self._build_call_info(contract_id) + try: + return self._unsafe_call_view_method(contract_id, method_name, args, kwargs) + finally: + self._reset_all_change_trackers() + + def _handle_index_update(self, action: NCAction) -> None: + """For each action in a public method call, create the appropriate index update records.""" + call_record = self.get_current_call_record() + assert call_record.index_updates is not None + + match action: + case NCDepositAction() | NCWithdrawalAction(): + # Since these actions only affect indexes when used via a transaction call + # (not when used across contracts), they are handled only once when the tx + # is added to indexes (more specifically, to the tokens index). + pass + case NCGrantAuthorityAction() | NCAcquireAuthorityAction(): + # Since these actions "duplicate" authorities, they must be + # handled everytime they're used, even across contracts. + # That's why they account for index update records. 
+ record = UpdateAuthoritiesRecord( + token_uid=action.token_uid, + sub_type=UpdateAuthoritiesRecordType.GRANT, + mint=action.mint, + melt=action.melt, + ) + call_record.index_updates.append(record) + case _: + assert_never(action) + + def syscall_call_another_contract_view_method( + self, + contract_id: ContractId, + method_name: str, + args: tuple[Any, ...], + kwargs: dict[str, Any], + ) -> Any: + """Call the view method of another contract.""" + assert self._call_info is not None + if self.get_current_contract_id() == contract_id: + raise NCInvalidContractId('a contract cannot call itself') + return self._unsafe_call_view_method(contract_id, method_name, args, kwargs) + + def _unsafe_call_view_method( + self, + contract_id: ContractId, + method_name: str, + args: tuple[Any, ...], + kwargs: dict[str, Any], + ) -> Any: + """Call a contract view method without handling resets.""" + assert self._call_info is not None + if not self.has_contract_been_initialized(contract_id): + raise NCUninitializedContractError('cannot call methods from uninitialized contracts') + + if self._metered_executor is None: + self._metered_executor = MeteredExecutor(fuel=self._initial_fuel, memory_limit=self._memory_limit) + + changes_tracker = self._create_changes_tracker(contract_id) + blueprint_id = self.get_blueprint_id(contract_id) + blueprint = self._create_blueprint_instance(blueprint_id, changes_tracker) + method = getattr(blueprint, method_name, None) + + if method is None: + raise NCMethodNotFound(method_name) + if not is_nc_view_method(method): + raise NCInvalidMethodCall('not a view method') + + parser = Method.from_callable(method) + args = self._validate_nc_args_for_method(parser, NCParsedArgs(args, kwargs)) + + call_record = CallRecord( + type=CallType.VIEW, + depth=self._call_info.depth, + contract_id=contract_id, + blueprint_id=blueprint_id, + method_name=method_name, + ctx=None, + args=args, + changes_tracker=changes_tracker, + index_updates=None, + ) + 
self._call_info.pre_call(call_record) + + ret = self._metered_executor.call(method, args=args) + + if not changes_tracker.is_empty(): + raise NCViewMethodError('view methods cannot change the state') + + self._call_info.post_call(call_record) + return self._validate_return_type_for_method(parser, ret) + + def get_balance_before_current_call(self, contract_id: ContractId | None, token_uid: TokenUid | None) -> Balance: + """ + Return the contract balance for a given token before the current call, that is, + excluding any actions and changes in the current call. + """ + return self._get_balance(contract_id=contract_id, token_uid=token_uid, before_current_call=True) + + def get_current_balance(self, contract_id: ContractId | None, token_uid: TokenUid | None) -> Balance: + """ + Return the current contract balance for a given token, + which includes all actions and changes in the current call. + """ + return self._get_balance(contract_id=contract_id, token_uid=token_uid, before_current_call=False) + + def _get_balance( + self, + *, + contract_id: ContractId | None, + token_uid: TokenUid | None, + before_current_call: bool, + ) -> Balance: + """Internal implementation of get_balance.""" + if contract_id is None: + contract_id = self.get_current_contract_id() + if token_uid is None: + token_uid = TokenUid(HATHOR_TOKEN_UID) + + storage: NCContractStorage + if self._call_info is None: + storage = self.get_storage(contract_id) + else: + changes_tracker = self.get_current_changes_tracker(contract_id) + storage = changes_tracker.storage if before_current_call else changes_tracker + + return storage.get_balance(bytes(token_uid)) + + def get_current_call_record(self) -> CallRecord: + """Return the call record for the current method being executed.""" + assert self._call_info is not None + return self._call_info.stack[-1] + + def get_current_contract_id(self) -> ContractId: + """Return the contract id for the current method being executed.""" + call_record = 
self.get_current_call_record() + return call_record.contract_id + + def get_current_changes_tracker(self, contract_id: ContractId) -> NCChangesTracker: + """Return the NCChangesTracker for the current method being executed.""" + assert self._call_info is not None + change_trackers = self._call_info.change_trackers[contract_id] + assert len(change_trackers) > 0 + return change_trackers[-1] + + def get_current_changes_tracker_or_storage(self, contract_id: ContractId) -> NCContractStorage: + """Return the current NCChangesTracker if it exists or NCContractStorage otherwise.""" + if self._call_info is not None and contract_id in self._call_info.change_trackers: + change_trackers = self._call_info.change_trackers[contract_id] + assert len(change_trackers) > 0 + return change_trackers[-1] + else: + return self.get_storage(contract_id) + + @_forbid_syscall_from_view('rng') + def syscall_get_rng(self) -> NanoRNG: + """Return the RNG for the current contract being executed.""" + if self._rng is None: + raise ValueError('no seed was provided') + contract_id = self.get_current_contract_id() + if contract_id not in self._rng_per_contract: + self._rng_per_contract[contract_id] = NanoRNG.create_with_shell(seed=self._rng.randbytes(32)) + return self._rng_per_contract[contract_id] + + def _internal_create_contract(self, contract_id: ContractId, blueprint_id: BlueprintId) -> None: + """Create a new contract without calling the initialize() method.""" + assert not self.has_contract_been_initialized(contract_id) + assert contract_id not in self._storages + nc_storage = self.block_storage.get_empty_contract_storage(contract_id) + nc_storage.set_blueprint_id(blueprint_id) + self._storages[contract_id] = nc_storage + + def create_contract( + self, + contract_id: ContractId, + blueprint_id: BlueprintId, + ctx: Context, + *args: Any, + **kwargs: Any, + ) -> Any: + """Create contract and call its initialize() method.""" + nc_args = NCParsedArgs(args, kwargs) + return 
self.create_contract_with_nc_args(contract_id, blueprint_id, ctx, nc_args) + + def create_contract_with_nc_args( + self, + contract_id: ContractId, + blueprint_id: BlueprintId, + ctx: Context, + nc_args: NCArgs, + ) -> Any: + """Create contract and call its initialize() method with pre-constructed NCArgs.""" + if self.has_contract_been_initialized(contract_id): + raise NCAlreadyInitializedContractError(contract_id) + + self._internal_create_contract(contract_id, blueprint_id) + try: + ret = self._unsafe_call_public_method(contract_id, NC_INITIALIZE_METHOD, ctx, nc_args) + finally: + self._reset_all_change_trackers() + return ret + + @_forbid_syscall_from_view('create_contract') + def syscall_create_another_contract( + self, + blueprint_id: BlueprintId, + salt: bytes, + actions: Sequence[NCAction], + args: tuple[Any, ...], + kwargs: dict[str, Any], + ) -> tuple[ContractId, Any]: + """Create a contract from another contract.""" + if not salt: + raise Exception('invalid salt') + + assert self._call_info is not None + last_call_record = self.get_current_call_record() + parent_id = last_call_record.contract_id + child_id = derive_child_contract_id(parent_id, salt, blueprint_id) + + if self.has_contract_been_initialized(child_id): + raise NCAlreadyInitializedContractError(child_id) + + self._internal_create_contract(child_id, blueprint_id) + nc_args = NCParsedArgs(args, kwargs) + ret = self._unsafe_call_another_contract_public_method( + child_id, + blueprint_id, + NC_INITIALIZE_METHOD, + actions, + nc_args, + ) + + assert last_call_record.index_updates is not None + syscall_record = SyscallCreateContractRecord(blueprint_id=blueprint_id, contract_id=child_id) + last_call_record.index_updates.append(syscall_record) + return child_id, ret + + @_forbid_syscall_from_view('revoke_authorities') + def syscall_revoke_authorities(self, token_uid: TokenUid, *, revoke_mint: bool, revoke_melt: bool) -> None: + """Revoke authorities from this nano contract.""" + call_record = 
self.get_current_call_record() + contract_id = call_record.contract_id + if token_uid == HATHOR_TOKEN_UID: + raise NCInvalidSyscall(f'contract {contract_id.hex()} cannot revoke authorities from HTR token') + + changes_tracker = self.get_current_changes_tracker(contract_id) + assert changes_tracker.nc_id == call_record.contract_id + balance = changes_tracker.get_balance(token_uid) + + if revoke_mint and not balance.can_mint: + raise NCInvalidSyscall(f'contract {call_record.contract_id.hex()} cannot mint {token_uid.hex()} tokens') + + if revoke_melt and not balance.can_melt: + raise NCInvalidSyscall(f'contract {call_record.contract_id.hex()} cannot melt {token_uid.hex()} tokens') + + changes_tracker.revoke_authorities( + token_uid, + revoke_mint=revoke_mint, + revoke_melt=revoke_melt, + ) + + assert call_record.index_updates is not None + syscall_record = UpdateAuthoritiesRecord( + token_uid=token_uid, + sub_type=UpdateAuthoritiesRecordType.REVOKE, + mint=revoke_mint, + melt=revoke_melt, + ) + call_record.index_updates.append(syscall_record) + + @_forbid_syscall_from_view('mint_tokens') + def syscall_mint_tokens(self, token_uid: TokenUid, amount: int) -> None: + """Mint tokens and add them to the balance of this nano contract.""" + call_record = self.get_current_call_record() + if token_uid == HATHOR_TOKEN_UID: + raise NCInvalidSyscall(f'contract {call_record.contract_id.hex()} cannot mint HTR tokens') + + changes_tracker = self.get_current_changes_tracker(call_record.contract_id) + assert changes_tracker.nc_id == call_record.contract_id + balance = changes_tracker.get_balance(token_uid) + + if not balance.can_mint: + raise NCInvalidSyscall(f'contract {call_record.contract_id.hex()} cannot mint {token_uid.hex()} tokens') + + token_amount = amount + htr_amount = -get_deposit_amount(self._settings, token_amount) + + changes_tracker.add_balance(token_uid, token_amount) + changes_tracker.add_balance(HATHOR_TOKEN_UID, htr_amount) + + self._updated_tokens_totals[token_uid] 
+= token_amount + self._updated_tokens_totals[TokenUid(HATHOR_TOKEN_UID)] += htr_amount + + assert call_record.index_updates is not None + syscall_record = SyscallUpdateTokensRecord( + type=IndexUpdateRecordType.MINT_TOKENS, + token_uid=token_uid, + token_amount=token_amount, + htr_amount=htr_amount, + ) + call_record.index_updates.append(syscall_record) + + @_forbid_syscall_from_view('melt_tokens') + def syscall_melt_tokens(self, token_uid: TokenUid, amount: int) -> None: + """Melt tokens by removing them from the balance of this nano contract.""" + call_record = self.get_current_call_record() + if token_uid == HATHOR_TOKEN_UID: + raise NCInvalidSyscall(f'contract {call_record.contract_id.hex()} cannot melt HTR tokens') + + changes_tracker = self.get_current_changes_tracker(call_record.contract_id) + assert changes_tracker.nc_id == call_record.contract_id + balance = changes_tracker.get_balance(token_uid) + + if not balance.can_melt: + raise NCInvalidSyscall(f'contract {call_record.contract_id.hex()} cannot melt {token_uid.hex()} tokens') + + token_amount = -amount + htr_amount = get_withdraw_amount(self._settings, token_amount) + + changes_tracker.add_balance(token_uid, token_amount) + changes_tracker.add_balance(HATHOR_TOKEN_UID, htr_amount) + + self._updated_tokens_totals[token_uid] += token_amount + self._updated_tokens_totals[TokenUid(HATHOR_TOKEN_UID)] += htr_amount + + assert call_record.index_updates is not None + syscall_record = SyscallUpdateTokensRecord( + type=IndexUpdateRecordType.MELT_TOKENS, + token_uid=token_uid, + token_amount=token_amount, + htr_amount=htr_amount, + ) + call_record.index_updates.append(syscall_record) + + def _validate_context(self, ctx: Context) -> None: + """Check whether the context is valid.""" + for token_uid, actions in ctx.actions.items(): + for action in actions: + if token_uid != action.token_uid: + raise NCInvalidContext('token_uid mismatch') + if isinstance(action, BaseTokenAction) and action.amount < 0: + raise 
NCInvalidContext('amount must be positive') + + def _validate_actions(self, method: Any, method_name: str, ctx: Context) -> None: + """Check whether actions are allowed.""" + allowed_actions: set[NCActionType] = getattr(method, NC_ALLOWED_ACTIONS_ATTR, set()) + assert isinstance(allowed_actions, set) + + for actions in ctx.actions.values(): + for action in actions: + if action.type not in allowed_actions: + raise NCForbiddenAction(f'action {action.name} is forbidden on method `{method_name}`') + + def _create_blueprint_instance(self, blueprint_id: BlueprintId, changes_tracker: NCChangesTracker) -> Blueprint: + """Create a new blueprint instance.""" + assert self._call_info is not None + env = BlueprintEnvironment(self, self._call_info.nc_logger, changes_tracker) + blueprint_class = self.tx_storage.get_blueprint_class(blueprint_id) + return blueprint_class(env) + + @_forbid_syscall_from_view('create_token') + def syscall_create_child_token( + self, + token_name: str, + token_symbol: str, + amount: int, + mint_authority: bool, + melt_authority: bool, + ) -> TokenUid: + """Create a child token from a contract.""" + try: + validate_token_name_and_symbol(self._settings, token_name, token_symbol) + except TransactionDataError as e: + raise NCInvalidSyscall('invalid token description') from e + + last_call_record = self.get_current_call_record() + parent_id = last_call_record.contract_id + cleaned_token_symbol = clean_token_string(token_symbol) + token_id = derive_child_token_id(parent_id, cleaned_token_symbol) + + token_amount = amount + htr_amount = get_deposit_amount(self._settings, token_amount) + + changes_tracker = self.get_current_changes_tracker(parent_id) + changes_tracker.create_token(token_id, token_name, token_symbol) + changes_tracker.grant_authorities( + token_id, + grant_mint=mint_authority, + grant_melt=melt_authority, + ) + changes_tracker.add_balance(token_id, amount) + changes_tracker.add_balance(HATHOR_TOKEN_UID, -htr_amount) + 
self._updated_tokens_totals[token_id] += amount + self._updated_tokens_totals[TokenUid(HATHOR_TOKEN_UID)] -= htr_amount + + assert last_call_record.index_updates is not None + syscall_record = SyscallUpdateTokensRecord( + type=IndexUpdateRecordType.CREATE_TOKEN, + token_uid=token_id, + token_amount=token_amount, + htr_amount=-htr_amount, + token_symbol=token_symbol, + token_name=token_name, + ) + last_call_record.index_updates.append(syscall_record) + + return token_id + + @_forbid_syscall_from_view('emit_event') + def syscall_emit_event(self, data: bytes) -> None: + """Emit a custom event from a Nano Contract.""" + assert self._call_info is not None + self._call_info.nc_logger.__emit_event__(data) + + @_forbid_syscall_from_view('change_blueprint') + def syscall_change_blueprint(self, blueprint_id: BlueprintId) -> None: + """Change the blueprint of a contract.""" + assert self._call_info is not None + last_call_record = self.get_current_call_record() + if last_call_record.type is CallType.VIEW: + raise NCInvalidPublicMethodCallFromView('forbidden') + + # The blueprint must exist. If an unknown blueprint is provided, it will raise an BlueprintDoesNotExist + # exception. 
+ self.tx_storage.get_blueprint_class(blueprint_id) + + nc_storage = self.get_current_changes_tracker(last_call_record.contract_id) + nc_storage.set_blueprint_id(blueprint_id) + + +class RunnerFactory: + __slots__ = ('reactor', 'settings', 'tx_storage', 'nc_storage_factory') + + def __init__( + self, + *, + reactor: ReactorProtocol, + settings: HathorSettings, + tx_storage: TransactionStorage, + nc_storage_factory: NCStorageFactory, + ) -> None: + self.reactor = reactor + self.settings = settings + self.tx_storage = tx_storage + self.nc_storage_factory = nc_storage_factory + + def create(self, *, block_storage: NCBlockStorage, seed: bytes | None = None) -> Runner: + return Runner( + reactor=self.reactor, + settings=self.settings, + tx_storage=self.tx_storage, + storage_factory=self.nc_storage_factory, + block_storage=block_storage, + seed=seed, + ) diff --git a/hathor/nanocontracts/runner/types.py b/hathor/nanocontracts/runner/types.py new file mode 100644 index 000000000..0b48490b2 --- /dev/null +++ b/hathor/nanocontracts/runner/types.py @@ -0,0 +1,266 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import annotations + +from dataclasses import dataclass, field +from enum import StrEnum, auto, unique +from typing import TYPE_CHECKING, Any, TypeAlias + +from typing_extensions import Literal, Self, assert_never + +from hathor.nanocontracts.context import Context +from hathor.nanocontracts.exception import NCNumberOfCallsExceeded, NCRecursionError +from hathor.nanocontracts.storage import NCChangesTracker, NCContractStorage +from hathor.nanocontracts.types import BlueprintId, ContractId, TokenUid, VertexId + +if TYPE_CHECKING: + from hathor.nanocontracts.nc_exec_logs import NCLogger + + +@unique +class CallType(StrEnum): + PUBLIC = auto() + VIEW = auto() + + +@unique +class IndexUpdateRecordType(StrEnum): + CREATE_CONTRACT = auto() + MINT_TOKENS = auto() + MELT_TOKENS = auto() + CREATE_TOKEN = auto() + UPDATE_AUTHORITIES = auto() + + +@dataclass(slots=True, frozen=True, kw_only=True) +class SyscallCreateContractRecord: + blueprint_id: BlueprintId + contract_id: ContractId + + def to_json(self) -> dict[str, Any]: + return dict( + type=IndexUpdateRecordType.CREATE_CONTRACT, + blueprint_id=self.blueprint_id.hex(), + contract_id=self.contract_id.hex(), + ) + + @classmethod + def from_json(cls, json_dict: dict[str, Any]) -> Self: + assert json_dict['type'] == IndexUpdateRecordType.CREATE_CONTRACT + return cls( + contract_id=ContractId(VertexId(bytes.fromhex(json_dict['contract_id']))), + blueprint_id=BlueprintId(VertexId(bytes.fromhex(json_dict['blueprint_id']))), + ) + + +@dataclass(slots=True, frozen=True, kw_only=True) +class SyscallUpdateTokensRecord: + type: ( + Literal[IndexUpdateRecordType.MINT_TOKENS] + | Literal[IndexUpdateRecordType.MELT_TOKENS] + | Literal[IndexUpdateRecordType.CREATE_TOKEN] + ) + token_uid: TokenUid + token_amount: int + htr_amount: int + token_symbol: str | None = None + token_name: str | None = None + + def __post_init__(self) -> None: + match self.type: + case IndexUpdateRecordType.MINT_TOKENS | 
IndexUpdateRecordType.CREATE_TOKEN:
+                assert self.token_amount > 0 and self.htr_amount < 0
+            case IndexUpdateRecordType.MELT_TOKENS:
+                assert self.token_amount < 0 and self.htr_amount > 0
+            case _:
+                assert_never(self.type)
+
+    def to_json(self) -> dict[str, Any]:
+        return dict(
+            type=self.type,
+            token_uid=self.token_uid.hex(),
+            token_amount=self.token_amount,
+            htr_amount=self.htr_amount,
+        )
+
+    @classmethod
+    def from_json(cls, json_dict: dict[str, Any]) -> Self:
+        valid_types = (
+            IndexUpdateRecordType.MINT_TOKENS, IndexUpdateRecordType.MELT_TOKENS, IndexUpdateRecordType.CREATE_TOKEN
+        )
+        assert json_dict['type'] in valid_types
+        return cls(
+            type=json_dict['type'],
+            token_uid=TokenUid(VertexId(bytes.fromhex(json_dict['token_uid']))),
+            token_amount=json_dict['token_amount'],
+            htr_amount=json_dict['htr_amount'],
+        )
+
+
+@unique
+class UpdateAuthoritiesRecordType(StrEnum):
+    GRANT = auto()
+    REVOKE = auto()
+
+
+@dataclass(slots=True, frozen=True, kw_only=True)
+class UpdateAuthoritiesRecord:
+    token_uid: TokenUid
+    sub_type: UpdateAuthoritiesRecordType
+    mint: bool
+    melt: bool
+
+    def __post_init__(self) -> None:
+        assert self.mint or self.melt
+
+    def to_json(self) -> dict[str, Any]:
+        return dict(
+            type=IndexUpdateRecordType.UPDATE_AUTHORITIES,
+            token_uid=self.token_uid.hex(),
+            sub_type=self.sub_type,
+            mint=self.mint,
+            melt=self.melt,
+        )
+
+    @classmethod
+    def from_json(cls, json_dict: dict[str, Any]) -> Self:
+        assert json_dict['type'] == IndexUpdateRecordType.UPDATE_AUTHORITIES
+        return cls(
+            token_uid=TokenUid(VertexId(bytes.fromhex(json_dict['token_uid']))),
+            sub_type=UpdateAuthoritiesRecordType(json_dict['sub_type']),
+            mint=json_dict['mint'],
+            melt=json_dict['melt'],
+        )
+
+
+NCIndexUpdateRecord: TypeAlias = SyscallCreateContractRecord | SyscallUpdateTokensRecord | UpdateAuthoritiesRecord
+
+
+def nc_index_update_record_from_json(json_dict: dict[str, Any]) -> NCIndexUpdateRecord:
+    syscall_type = 
IndexUpdateRecordType(json_dict['type'])
+    match syscall_type:
+        case IndexUpdateRecordType.CREATE_CONTRACT:
+            return SyscallCreateContractRecord.from_json(json_dict)
+        case (
+            IndexUpdateRecordType.MINT_TOKENS
+            | IndexUpdateRecordType.MELT_TOKENS
+            | IndexUpdateRecordType.CREATE_TOKEN
+        ):
+            return SyscallUpdateTokensRecord.from_json(json_dict)
+        case IndexUpdateRecordType.UPDATE_AUTHORITIES:
+            return UpdateAuthoritiesRecord.from_json(json_dict)
+        case _:
+            assert_never(syscall_type)
+
+
+@dataclass(slots=True, frozen=True, kw_only=True)
+class CallRecord:
+    """This object keeps information about a single call between contracts."""
+
+    # The type of the method being called (public or view).
+    type: CallType
+
+    # The depth in the call stack.
+    depth: int
+
+    # The contract being invoked.
+    contract_id: ContractId
+
+    # The blueprint at the time of execution.
+    blueprint_id: BlueprintId
+
+    # The method being invoked.
+    method_name: str
+
+    # The context passed in this call.
+    ctx: Context | None
+
+    # The args provided to the method.
+    args: tuple[Any, ...]
+
+    # Keep track of all changes made by this call.
+    changes_tracker: NCChangesTracker
+
+    # A list of actions or syscalls that affect indexes. None when it's a VIEW call.
+    index_updates: list[NCIndexUpdateRecord] | None
+
+
+@dataclass(slots=True, kw_only=True)
+class CallInfo:
+    """This object keeps information about a method call and its subsequent calls."""
+    MAX_RECURSION_DEPTH: int
+    MAX_CALL_COUNTER: int
+
+    # The execution stack. This stack is dynamic and changes as the execution progresses.
+    stack: list[CallRecord] = field(default_factory=list)
+
+    # Change trackers are grouped by contract. Because multiple calls can occur between contracts, leading to more than
+    # one NCChangesTracker per contract, a stack is used. This design makes it fast to retrieve the most recent tracker
+    # for a given contract whenever a new call is made. 
+ change_trackers: dict[ContractId, list[NCChangesTracker]] = field(default_factory=dict) + + # Flag to enable/disable keeping record of all calls. + enable_call_trace: bool + + # A trace of the calls that happened. This will only be filled if `enable_call_trace` is true. + calls: list[CallRecord] | None = None + + # Counter of the number of calls performed so far. This is a dynamic value that changes as the + # execution progresses. + call_counter: int = 0 + + # The logger to keep track of log entries during this call. + nc_logger: NCLogger + + @property + def depth(self) -> int: + """Get the depth of the call stack.""" + return len(self.stack) + + def pre_call(self, call_record: CallRecord) -> None: + """Called before a new call is executed.""" + if self.depth >= self.MAX_RECURSION_DEPTH: + raise NCRecursionError + + if self.call_counter >= self.MAX_CALL_COUNTER: + raise NCNumberOfCallsExceeded + + if self.enable_call_trace: + if self.calls is None: + self.calls = [] + self.calls.append(call_record) + + if call_record.contract_id not in self.change_trackers: + self.change_trackers[call_record.contract_id] = [call_record.changes_tracker] + else: + self.change_trackers[call_record.contract_id].append(call_record.changes_tracker) + + self.call_counter += 1 + self.stack.append(call_record) + self.nc_logger.__log_call_begin__(call_record) + + def post_call(self, call_record: CallRecord) -> None: + """Called after a call is finished.""" + assert call_record == self.stack.pop() + assert call_record.changes_tracker == self.change_trackers[call_record.contract_id][-1] + assert call_record.changes_tracker.nc_id == call_record.changes_tracker.storage.nc_id + + change_trackers = self.change_trackers[call_record.contract_id] + if len(change_trackers) > 1: + assert call_record.changes_tracker.storage == change_trackers[-2] + assert call_record.changes_tracker == change_trackers.pop() + else: + assert type(call_record.changes_tracker.storage) is NCContractStorage + 
self.nc_logger.__log_call_end__() diff --git a/hathor/nanocontracts/sorter/__init__.py b/hathor/nanocontracts/sorter/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/hathor/nanocontracts/sorter/random_sorter.py b/hathor/nanocontracts/sorter/random_sorter.py new file mode 100644 index 000000000..b2e2ea43e --- /dev/null +++ b/hathor/nanocontracts/sorter/random_sorter.py @@ -0,0 +1,192 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +import hashlib +from collections import defaultdict +from dataclasses import dataclass + +from typing_extensions import Self + +from hathor.nanocontracts.rng import NanoRNG +from hathor.transaction import Block, Transaction +from hathor.types import Address, VertexId + + +def random_nc_calls_sorter(block: Block, nc_calls: list[Transaction]) -> list[Transaction]: + sorter = NCBlockSorter.create_from_block(block, nc_calls) + seed = hashlib.sha256(block.hash).digest() + + order = sorter.generate_random_topological_order(seed) + tx_by_id = dict((tx.hash, tx) for tx in nc_calls) + assert set(order) == set(tx_by_id.keys()) + + ret: list[Transaction] = [] + for _id in order: + ret.append(tx_by_id[_id]) + return ret + + +@dataclass(slots=True, kw_only=True) +class SorterNode: + id: VertexId + outgoing_edges: set[VertexId] + incoming_edges: set[VertexId] + + def copy(self) -> 'SorterNode': + return SorterNode( + id=self.id, + 
outgoing_edges=set(self.outgoing_edges), + incoming_edges=set(self.incoming_edges), + ) + + +class NCBlockSorter: + """This class is responsible for sorting a list of Nano cryptocurrency + transactions to be executed by the consensus algorithm. The transactions + are sorted in topological order, ensuring proper dependency management. + + Algorithm: + + 1. Construct a Directed Acyclic Graph (DAG) of dependencies in O(n). + 2. Add "dummy" nodes between groups of txs with the same seqnum, acting as proxies for DAG dependencies. + 3. Apply Kahn's algorithm to produce a topological sort in O(n). Skip nodes that are not part of nc_calls, + that is, with IDs that are either not txs, not NCs, or are dummy nodes. + """ + __slots__ = ('db', '_dirty', '_block', '_nc_hashes') + + def __init__(self, nc_hashes: set[VertexId]) -> None: + self.db: dict[VertexId, SorterNode] = {} + self._dirty: bool = False + self._block: Block | None = None + self._nc_hashes = nc_hashes + + @classmethod + def create_from_block(cls, block: Block, nc_calls: list[Transaction]) -> Self: + """Create a Sorter instance from the nano transactions confirmed by a block.""" + nc_hashes = set(tx.hash for tx in nc_calls) + sorter = cls(nc_hashes) + sorter._block = block + + # Add only edges from the funds DAG to the graph. + for tx in block.iter_transactions_in_this_block(): + sorter.add_vertex(tx.hash) + + if tx.is_nano_contract(): + nano_header = tx.get_nano_header() + sorter.add_edge(tx.hash, nano_header.nc_id) + + for txin in tx.inputs: + sorter.add_edge(tx.hash, txin.tx_id) + + # Add edges from nano seqnum. + + # A dict of txs grouped by address and then seqnum. 
+ grouped_txs: defaultdict[Address, defaultdict[int, list[Transaction]]] = defaultdict(lambda: defaultdict(list)) + dummy_nodes = 0 + + for tx in nc_calls: + assert tx.is_nano_contract() + nano_header = tx.get_nano_header() + grouped_txs[nano_header.nc_address][nano_header.nc_seqnum].append(tx) + + for _address, txs_by_seqnum in grouped_txs.items(): + sorted_by_seqnum = sorted(txs_by_seqnum.items()) + for i in range(1, len(sorted_by_seqnum)): + prev_seqnum, prev_txs = sorted_by_seqnum[i - 1] + curr_seqnum, curr_txs = sorted_by_seqnum[i] + dummy_node_id = f'dummy:{dummy_nodes}'.encode() + sorter.add_vertex(dummy_node_id) + dummy_nodes += 1 + + # Add edges from the dummy node to all prev_txs + for prev_tx in prev_txs: + sorter.add_edge(dummy_node_id, prev_tx.hash) + + # Add edges from curr_txs to the dummy node only when the + # tx's timestamp is greater than all prev_txs timestamps + max_prev_txs_timestamp = max(prev_txs, key=lambda tx: tx.timestamp).timestamp + for curr_tx in curr_txs: + if curr_tx.timestamp > max_prev_txs_timestamp: + sorter.add_edge(curr_tx.hash, dummy_node_id) + + return sorter + + def copy(self) -> NCBlockSorter: + """Copy the sorter. 
It is useful if one wants to call get_random_topological_order() multiple times.""" + if self._dirty: + raise RuntimeError('copying a dirty sorter') + new_sorter = NCBlockSorter(self._nc_hashes) + for vertex_id, vertex in self.db.items(): + new_sorter.db[vertex_id] = vertex.copy() + return new_sorter + + def add_vertex(self, _id: VertexId) -> None: + """Add a vertex to the DAG.""" + _ = self.get_node(_id) + + def add_edge(self, from_: VertexId, to: VertexId) -> None: + """Add the edge (_from, _to) to this DAG.""" + assert from_ != to + self.get_node(from_).outgoing_edges.add(to) + self.get_node(to).incoming_edges.add(from_) + + def get_node(self, id_: VertexId) -> SorterNode: + """Get a node by id or create one if it does not exist.""" + vertex = self.db.get(id_) + if vertex is not None: + return vertex + + vertex = SorterNode(id=id_, outgoing_edges=set(), incoming_edges=set()) + self.db[id_] = vertex + return vertex + + def get_vertices_with_no_outgoing_edges(self) -> list[VertexId]: + """Get all vertices with no outgoing edges.""" + return [v.id for v in self.db.values() if not v.outgoing_edges] + + def generate_random_topological_order(self, seed: bytes) -> list[VertexId]: + """Generate a random topological order according to the DAG. + + This method can only be called once because it changes the DAG during its execution. + """ + if self._dirty: + raise RuntimeError('this method can only be called once') + self._dirty = True + + rng = NanoRNG(seed) + + candidates = self.get_vertices_with_no_outgoing_edges() + ret = [] + for i in range(len(self.db)): + assert len(candidates) > 0, 'empty candidates, probably caused by circular dependencies in the graph' + idx = rng.randbelow(len(candidates)) + # FIXME pop() runs in O(n) + vertex_id = candidates.pop(idx) + + # Skip all nodes that do not belong to nc_calls, which are either non-nano txs or dummy nodes. 
+ if vertex_id in self._nc_hashes: + ret.append(vertex_id) + + vertex = self.get_node(vertex_id) + assert not vertex.outgoing_edges + for in_vertex_id in vertex.incoming_edges: + in_vertex = self.get_node(in_vertex_id) + in_vertex.outgoing_edges.remove(vertex_id) + + if not in_vertex.outgoing_edges: + candidates.append(in_vertex_id) + + return ret diff --git a/hathor/nanocontracts/sorter/timestamp_sorter.py b/hathor/nanocontracts/sorter/timestamp_sorter.py new file mode 100644 index 000000000..acdd08c8e --- /dev/null +++ b/hathor/nanocontracts/sorter/timestamp_sorter.py @@ -0,0 +1,23 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from hathor.transaction import Block, Transaction + + +def timestamp_nc_calls_sorter(block: Block, nc_calls: list[Transaction]) -> list[Transaction]: + """Return the nc_calls sorted by (timestamp, hash). + + DEPRECATED: This is used only to keep compatibility with the alpha nano-testnet. + """ + return sorted(nc_calls, key=lambda tx: (tx.timestamp, tx.hash)) diff --git a/hathor/nanocontracts/sorter/types.py b/hathor/nanocontracts/sorter/types.py new file mode 100644 index 000000000..307c5d823 --- /dev/null +++ b/hathor/nanocontracts/sorter/types.py @@ -0,0 +1,22 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
# ====================================================================
# hathor/nanocontracts/sorter/types.py  (continuation)
# ====================================================================

from typing import Protocol

from hathor.transaction import Block, Transaction


class NCSorterCallable(Protocol):
    """Structural type of a nano-contract call sorter.

    A sorter receives the block and the nc_calls it confirms, and returns
    the calls in execution order.
    """

    def __call__(self, block: Block, nc_calls: list[Transaction]) -> list[Transaction]:
        ...


# ====================================================================
# hathor/nanocontracts/storage/__init__.py  (new file in this diff)
# ====================================================================
# Copyright 2023 Hathor Labs — Apache License, Version 2.0.

from hathor.nanocontracts.storage.block_storage import NCBlockStorage
from hathor.nanocontracts.storage.changes_tracker import NCChangesTracker
from hathor.nanocontracts.storage.contract_storage import NCContractStorage
from hathor.nanocontracts.storage.factory import NCMemoryStorageFactory, NCRocksDBStorageFactory, NCStorageFactory
from hathor.nanocontracts.storage.types import DeletedKey

# Public API of the storage package.
__all__ = [
    'NCBlockStorage',
    'NCContractStorage',
    'NCChangesTracker',
    'NCMemoryStorageFactory',
    'NCRocksDBStorageFactory',
    'NCStorageFactory',
    'DeletedKey',
]

# ====================================================================
# hathor/nanocontracts/storage/backends.py  (new file — only its license
# header falls in this chunk; content continues below)
# ====================================================================
+ +from __future__ import annotations + +from abc import ABC, abstractmethod +from typing import TYPE_CHECKING + +from hathor.nanocontracts.storage.node_nc_type import NodeNCType +from hathor.serialization import Deserializer, Serializer +from hathor.storage.rocksdb_storage import RocksDBStorage + +if TYPE_CHECKING: + from hathor.nanocontracts.storage.patricia_trie import Node + + +class NodeTrieStore(ABC): + @abstractmethod + def __getitem__(self, key: bytes) -> Node: + raise NotImplementedError + + @abstractmethod + def __setitem__(self, key: bytes, item: Node) -> None: + raise NotImplementedError + + @abstractmethod + def __len__(self) -> int: + raise NotImplementedError + + @abstractmethod + def __contains__(self, key: bytes) -> bool: + raise NotImplementedError + + +class MemoryNodeTrieStore(NodeTrieStore): + def __init__(self) -> None: + self._db: dict[bytes, Node] = {} + + def __getitem__(self, key: bytes) -> Node: + return self._db[key] + + def __setitem__(self, key: bytes, item: Node) -> None: + self._db[key] = item + + def __len__(self) -> int: + return len(self._db) + + def __contains__(self, key: bytes) -> bool: + return key in self._db + + +class RocksDBNodeTrieStore(NodeTrieStore): + _CF_NAME = b'nc-state' + _KEY_LENGTH = b'length' + + def __init__(self, rocksdb_storage: RocksDBStorage) -> None: + self._rocksdb_storage = rocksdb_storage + self._db = self._rocksdb_storage.get_db() + self._cf_key = self._rocksdb_storage.get_or_create_column_family(self._CF_NAME) + self._node_nc_type = NodeNCType() + + def _serialize_node(self, node: Node, /) -> bytes: + serializer = Serializer.build_bytes_serializer() + self._node_nc_type.serialize(serializer, node) + return bytes(serializer.finalize()) + + def _deserialize_node(self, node_bytes: bytes, /) -> Node: + deserializer = Deserializer.build_bytes_deserializer(node_bytes) + node = self._node_nc_type.deserialize(deserializer) + deserializer.finalize() + return node + + def __getitem__(self, key: bytes) -> Node: 
+ item_bytes = self._db.get((self._cf_key, key)) + if item_bytes is None: + raise KeyError(key.hex()) + return self._deserialize_node(item_bytes) + + def __setitem__(self, key: bytes, item: Node) -> None: + item_bytes = self._serialize_node(item) + self._db.put((self._cf_key, key), item_bytes) + + def __len__(self) -> int: + it = self._db.iterkeys() + it.seek_to_first() + return sum(1 for _ in it) + + def __contains__(self, key: bytes) -> bool: + return bool(self._db.get((self._cf_key, key)) is not None) diff --git a/hathor/nanocontracts/storage/block_storage.py b/hathor/nanocontracts/storage/block_storage.py new file mode 100644 index 000000000..ee870b878 --- /dev/null +++ b/hathor/nanocontracts/storage/block_storage.py @@ -0,0 +1,167 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import annotations + +from enum import Enum +from typing import NamedTuple, Optional + +from hathor.nanocontracts.exception import NanoContractDoesNotExist +from hathor.nanocontracts.nc_types.dataclass_nc_type import make_dataclass_nc_type +from hathor.nanocontracts.storage.contract_storage import NCContractStorage +from hathor.nanocontracts.storage.patricia_trie import NodeId, PatriciaTrie +from hathor.nanocontracts.storage.token_proxy import TokenProxy +from hathor.nanocontracts.types import Address, ContractId, TokenUid +from hathor.transaction.headers.nano_header import ADDRESS_SEQNUM_SIZE +from hathor.utils import leb128 + + +class _Tag(Enum): + CONTRACT = b'\0' + TOKEN = b'\1' + ADDRESS = b'\2' + + +class ContractKey(NamedTuple): + nc_id: bytes + + def __bytes__(self): + return _Tag.CONTRACT.value + self.nc_id + + +class TokenKey(NamedTuple): + token_id: bytes + + def __bytes__(self): + return _Tag.TOKEN.value + self.token_id + + +class AddressKey(NamedTuple): + address: Address + + def __bytes__(self): + return _Tag.ADDRESS.value + self.address + + +class NCBlockStorage: + """This is the storage used by NanoContracts. 
+ + This implementation works for both memory and rocksdb backends.""" + from hathor.transaction.token_creation_tx import TokenDescription + _TOKEN_DESCRIPTION_NC_TYPE = make_dataclass_nc_type(TokenDescription) + + def __init__(self, block_trie: PatriciaTrie) -> None: + self._block_trie: PatriciaTrie = block_trie + + def has_contract(self, contract_id: ContractId) -> bool: + try: + self.get_contract_root_id(contract_id) + except KeyError: + return False + else: + return True + + def get_contract_root_id(self, contract_id: ContractId) -> bytes: + """Return the root id of a contract's storage.""" + key = ContractKey(contract_id) + return self._block_trie.get(bytes(key)) + + def update_contract_trie(self, nc_id: ContractId, root_id: bytes) -> None: + key = ContractKey(nc_id) + self._block_trie.update(bytes(key), root_id) + + def commit(self) -> None: + """Flush all local changes to the storage.""" + self._block_trie.commit() + + def get_root_id(self) -> bytes: + """Return the current merkle root id of the trie.""" + return self._block_trie.root.id + + @staticmethod + def bytes_to_node_id(node_id: Optional[bytes]) -> Optional[NodeId]: + if node_id is None: + return node_id + return NodeId(node_id) + + def _get_trie(self, root_id: Optional[bytes]) -> 'PatriciaTrie': + """Return a PatriciaTrie object with a given root.""" + from hathor.nanocontracts.storage.patricia_trie import PatriciaTrie + store = self._block_trie.get_store() + trie = PatriciaTrie(store, root_id=self.bytes_to_node_id(root_id)) + return trie + + def get_contract_storage(self, contract_id: ContractId) -> NCContractStorage: + try: + nc_root_id = self.get_contract_root_id(contract_id) + trie = self._get_trie(nc_root_id) + except KeyError: + raise NanoContractDoesNotExist(contract_id.hex()) + token_proxy = TokenProxy(self) + return NCContractStorage(trie=trie, nc_id=contract_id, token_proxy=token_proxy) + + def get_empty_contract_storage(self, contract_id: ContractId) -> NCContractStorage: + """Create a 
new contract storage instance for a given contract.""" + trie = self._get_trie(None) + token_proxy = TokenProxy(self) + return NCContractStorage(trie=trie, nc_id=contract_id, token_proxy=token_proxy) + + def get_token_description(self, token_id: TokenUid) -> TokenDescription: + """Return the token description for a given token_id.""" + key = TokenKey(token_id) + token_description_bytes = self._block_trie.get(bytes(key)) + token_description = self._TOKEN_DESCRIPTION_NC_TYPE.from_bytes(token_description_bytes) + return token_description + + def has_token(self, token_id: TokenUid) -> bool: + """Return True if the token_id already exists in this block's nano state.""" + key = TokenKey(token_id) + try: + self._block_trie.get(bytes(key)) + except KeyError: + return False + else: + return True + + def create_token(self, token_id: TokenUid, token_name: str, token_symbol: str) -> None: + """Create a new token in this block's nano state.""" + from hathor.transaction.token_creation_tx import TokenDescription + + key = TokenKey(token_id) + token_description = TokenDescription(token_id=token_id, token_name=token_name, token_symbol=token_symbol) + token_description_bytes = self._TOKEN_DESCRIPTION_NC_TYPE.to_bytes(token_description) + self._block_trie.update(bytes(key), token_description_bytes) + + def get_address_seqnum(self, address: Address) -> int: + """Get the latest seqnum for an address. 
+ + For clarity, new transactions must have a GREATER seqnum to be able to be executed.""" + key = AddressKey(address) + try: + seqnum_bytes = self._block_trie.get(bytes(key)) + except KeyError: + return -1 + else: + seqnum, buf = leb128.decode_unsigned(seqnum_bytes, max_bytes=ADDRESS_SEQNUM_SIZE) + assert len(buf) == 0 + return seqnum + + def set_address_seqnum(self, address: Address, seqnum: int) -> None: + """Update seqnum for an adress.""" + assert seqnum >= 0 + old_seqnum = self.get_address_seqnum(address) + assert seqnum > old_seqnum + key = AddressKey(address) + seqnum_bytes = leb128.encode_unsigned(seqnum, max_bytes=ADDRESS_SEQNUM_SIZE) + self._block_trie.update(bytes(key), seqnum_bytes) diff --git a/hathor/nanocontracts/storage/changes_tracker.py b/hathor/nanocontracts/storage/changes_tracker.py new file mode 100644 index 000000000..f4353a35e --- /dev/null +++ b/hathor/nanocontracts/storage/changes_tracker.py @@ -0,0 +1,287 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
# ====================================================================
# hathor/nanocontracts/storage/changes_tracker.py  (continuation)
# ====================================================================

import itertools
from dataclasses import dataclass
from enum import Enum
from types import MappingProxyType
from typing import Any, TypeVar

from typing_extensions import override

from hathor.conf.settings import HATHOR_TOKEN_UID
from hathor.nanocontracts.exception import NCInsufficientFunds, NCTokenAlreadyExists
from hathor.nanocontracts.nc_types import NCType
from hathor.nanocontracts.storage.contract_storage import (
    AttrKey,
    Balance,
    BalanceKey,
    MutableBalance,
    NCContractStorage,
)
from hathor.nanocontracts.storage.types import _NOT_PROVIDED, DeletedKey, DeletedKeyType
from hathor.nanocontracts.types import BlueprintId, ContractId, TokenUid
from hathor.transaction.token_creation_tx import TokenDescription

T = TypeVar('T')
D = TypeVar('D')


class _NCAuthorityState(Enum):
    """The tri-state of a token authority during execution."""
    NONE = 'none'
    GRANTED = 'granted'
    REVOKED = 'revoked'


@dataclass(slots=True, kw_only=True)
class _NCAuthorityDiff:
    """Pending tri-state diff for the mint/melt authorities of one token."""
    mint: _NCAuthorityState = _NCAuthorityState.NONE
    melt: _NCAuthorityState = _NCAuthorityState.NONE

    def grant_mint(self) -> bool:
        """Whether the final mint state of this diff is GRANTED."""
        return self.mint is _NCAuthorityState.GRANTED

    def grant_melt(self) -> bool:
        """Whether the final melt state of this diff is GRANTED."""
        return self.melt is _NCAuthorityState.GRANTED

    def revoke_mint(self) -> bool:
        """Whether the final mint state of this diff is REVOKED."""
        return self.mint is _NCAuthorityState.REVOKED

    def revoke_melt(self) -> bool:
        """Whether the final melt state of this diff is REVOKED."""
        return self.melt is _NCAuthorityState.REVOKED


class NCChangesTracker(NCContractStorage):
    """Keep track of changes during the execution of a contract's method.

    These changes are not committed to the underlying storage until
    commit() is called on this tracker.
    """

    def __init__(self, nc_id: ContractId, storage: NCContractStorage):
        self.storage = storage
        self.nc_id = nc_id

        # Tracked attribute writes: key -> (value-or-DeletedKey, NCType-or-None).
        self.data: dict[AttrKey, tuple[Any, NCType | None]] = {}
        self._balance_diff: dict[BalanceKey, int] = {}
        self._authorities_diff: dict[BalanceKey, _NCAuthorityDiff] = {}
        self._created_tokens: dict[TokenUid, TokenDescription] = {}
        self._blueprint_id: BlueprintId | None = None

        self.has_been_commited = False
        self.has_been_blocked = False

    def create_token(self, token_id: TokenUid, token_name: str, token_symbol: str) -> None:
        """Register a token creation; raises NCTokenAlreadyExists on duplicates."""
        if self.has_token(token_id):
            raise NCTokenAlreadyExists
        self._created_tokens[token_id] = TokenDescription(
            token_id=token_id,
            token_name=token_name,
            token_symbol=token_symbol,
        )

    def has_token(self, token_id: TokenUid) -> bool:
        """Return True if `token_id` exists locally or in the underlying storage."""
        return token_id in self._created_tokens or self.storage.has_token(token_id)

    def get_balance_diff(self) -> MappingProxyType[BalanceKey, int]:
        """Return a read-only view of the balance diff of this tracker."""
        return MappingProxyType(self._balance_diff)

    @override
    def check_if_locked(self) -> None:
        if self.has_been_commited:
            raise RuntimeError('you cannot change any value after the commit has been executed')
        if self.has_been_blocked:
            raise RuntimeError('you cannot change any value after the changes have been blocked')

    def block(self) -> None:
        """Block the changes and prevent them from being committed."""
        self.check_if_locked()
        self.has_been_blocked = True

    @override
    def get_obj(self, key: bytes, nc_type: NCType[T], *, default: D = _NOT_PROVIDED) -> T | D:
        obj_key = self._to_attr_key(key)
        obj: T | D | DeletedKeyType
        if obj_key in self.data:
            obj, _ = self.data[obj_key]
        else:
            # XXX: extra variable used so mypy can infer the correct type
            fetched = self.storage.get_obj(key, nc_type, default=default)
            obj = fetched
        if obj is DeletedKey:
            raise KeyError(key)
        assert not isinstance(obj, DeletedKeyType)
        return obj

    @override
    def put_obj(self, key: bytes, nc_type: NCType[T], data: T) -> None:
        self.check_if_locked()
        nc_type.check_value(data)
        self.data[self._to_attr_key(key)] = (data, nc_type)

    @override
    def del_obj(self, key: bytes) -> None:
        self.check_if_locked()
        self.data[self._to_attr_key(key)] = (DeletedKey, None)

    @override
    def has_obj(self, key: bytes) -> bool:
        obj_key = self._to_attr_key(key)
        if obj_key not in self.data:
            return self.storage.has_obj(key)
        obj, _ = self.data[obj_key]
        return obj is not DeletedKey

    @override
    def commit(self) -> None:
        """Flush every tracked change into the underlying contract storage."""
        self.check_if_locked()
        for attr_key, (obj, nc_type) in self.data.items():
            if obj is DeletedKey:
                self.storage.del_obj(attr_key.key)
            else:
                assert nc_type is not None
                assert not isinstance(obj, DeletedKeyType)
                self.storage.put_obj(attr_key.key, nc_type, obj)

        for balance_key, amount in self._balance_diff.items():
            self.storage.add_balance(balance_key.token_uid, amount)

        for balance_key, diff in self._authorities_diff.items():
            self.storage.grant_authorities(
                balance_key.token_uid,
                grant_mint=diff.grant_mint(),
                grant_melt=diff.grant_melt(),
            )
            self.storage.revoke_authorities(
                balance_key.token_uid,
                revoke_mint=diff.revoke_mint(),
                revoke_melt=diff.revoke_melt(),
            )

        for td in self._created_tokens.values():
            self.storage.create_token(TokenUid(td.token_id), td.token_name, td.token_symbol)

        if self._blueprint_id is not None:
            self.storage.set_blueprint_id(self._blueprint_id)

        self.has_been_commited = True

    def reset(self) -> None:
        """Discard tracked attribute and balance changes without persisting.

        NOTE(review): _authorities_diff, _created_tokens and _blueprint_id are
        not cleared here — confirm whether that is intentional.
        """
        self.data = {}
        self._balance_diff = {}

    @override
    def _get_mutable_balance(self, token_uid: bytes) -> MutableBalance:
        internal_key = BalanceKey(self.nc_id, token_uid)
        balance = self.storage._get_mutable_balance(token_uid)
        balance_diff = self._balance_diff.get(internal_key, 0)
        authorities_diff = self._authorities_diff.get(internal_key, _NCAuthorityDiff())

        balance.value += balance_diff
        balance.grant_authorities(
            grant_mint=authorities_diff.grant_mint(),
            grant_melt=authorities_diff.grant_melt(),
        )
        balance.revoke_authorities(
            revoke_mint=authorities_diff.revoke_mint(),
            revoke_melt=authorities_diff.revoke_melt(),
        )

        return balance

    def validate_balances_are_positive(self) -> None:
        """Check that all final balances are positive. If not, it raises NCInsufficientFunds."""
        for balance_key in self._balance_diff:
            balance = self.get_balance(balance_key.token_uid)
            if balance.value < 0:
                raise NCInsufficientFunds(
                    f'negative balance for contract {self.nc_id.hex()} '
                    f'(balance={balance} token_uid={balance_key.token_uid.hex()})'
                )

    @override
    def get_all_balances(self) -> dict[BalanceKey, Balance]:
        all_balance_keys: itertools.chain[BalanceKey] = itertools.chain(
            self.storage.get_all_balances().keys(),
            # There might be tokens in the change tracker that are still
            # not on storage, so we must check and add them as well.
            self._balance_diff.keys(),
            self._authorities_diff.keys(),
        )
        return {key: self.get_balance(key.token_uid) for key in set(all_balance_keys)}

    @override
    def add_balance(self, token_uid: bytes, amount: int) -> None:
        self.check_if_locked()
        internal_key = BalanceKey(self.nc_id, token_uid)
        old = self._balance_diff.get(internal_key, 0)
        self._balance_diff[internal_key] = old + amount

    @override
    def grant_authorities(self, token_uid: bytes, *, grant_mint: bool, grant_melt: bool) -> None:
        assert token_uid != HATHOR_TOKEN_UID
        self.check_if_locked()
        internal_key = BalanceKey(self.nc_id, token_uid)
        diff = self._authorities_diff.get(internal_key, _NCAuthorityDiff())
        diff.mint = _NCAuthorityState.GRANTED if grant_mint else diff.mint
        diff.melt = _NCAuthorityState.GRANTED if grant_melt else diff.melt
        self._authorities_diff[internal_key] = diff

    @override
    def revoke_authorities(self, token_uid: bytes, *, revoke_mint: bool, revoke_melt: bool) -> None:
        assert token_uid != HATHOR_TOKEN_UID
        self.check_if_locked()
        internal_key = BalanceKey(self.nc_id, token_uid)
        diff = self._authorities_diff.get(internal_key, _NCAuthorityDiff())
        diff.mint = _NCAuthorityState.REVOKED if revoke_mint else diff.mint
        diff.melt = _NCAuthorityState.REVOKED if revoke_melt else diff.melt
        self._authorities_diff[internal_key] = diff

    def is_empty(self) -> bool:
        # This method is only called in view contexts, so it's impossible
        # for balances, authorities, tokens or the blueprint to have changed.
        assert not bool(self._balance_diff)
        assert not bool(self._authorities_diff)
        assert not bool(self._created_tokens)
        assert not bool(self._blueprint_id)
        return not bool(self.data)

    @override
    def get_root_id(self) -> bytes:
        raise NotImplementedError

    def get_blueprint_id(self) -> BlueprintId:
        """Return the locally updated blueprint id, falling back to storage."""
        if self._blueprint_id is not None:
            return self._blueprint_id
        return self.storage.get_blueprint_id()

    def set_blueprint_id(self, value: BlueprintId) -> None:
        """Set a new blueprint id for the contract."""
        self.check_if_locked()
        self._blueprint_id = value


# ====================================================================
# hathor/nanocontracts/storage/contract_storage.py  (new file — only its
# license header falls in this chunk; content continues below)
# ====================================================================
# ====================================================================
# hathor/nanocontracts/storage/contract_storage.py  (continuation)
# ====================================================================

# XXX: avoid using `from __future__ import annotations` here because
# `make_dataclass_nc_type` doesn't support it

import hashlib
from abc import ABC, abstractmethod
from dataclasses import dataclass
from enum import Enum
from typing import TypeVar

from hathor.conf.settings import HATHOR_TOKEN_UID
from hathor.nanocontracts.nc_types import BytesNCType, NCType
from hathor.nanocontracts.nc_types.dataclass_nc_type import make_dataclass_nc_type
from hathor.nanocontracts.storage.maybedeleted_nc_type import MaybeDeletedNCType
from hathor.nanocontracts.storage.patricia_trie import PatriciaTrie
from hathor.nanocontracts.storage.token_proxy import TokenProxy
from hathor.nanocontracts.storage.types import _NOT_PROVIDED, DeletedKey, DeletedKeyType
from hathor.nanocontracts.types import BlueprintId, TokenUid, VertexId
from hathor.serialization import Deserializer, Serializer

T = TypeVar('T')
D = TypeVar('D')

_BYTES_NC_TYPE: NCType[bytes] = BytesNCType()


class _Tag(Enum):
    """One-byte prefixes that partition the contract-trie key space."""
    ATTR = b'\0'
    BALANCE = b'\1'
    METADATA = b'\2'


class TrieKey(ABC):
    """A key that can be rendered into the bytes actually stored in the trie."""

    @abstractmethod
    def __bytes__(self) -> bytes:
        raise NotImplementedError


@dataclass(frozen=True, slots=True)
class AttrKey(TrieKey):
    """Trie key for a contract attribute; the raw key is hashed for fixed length."""
    nc_id: bytes
    key: bytes

    def __bytes__(self) -> bytes:
        return _Tag.ATTR.value + hashlib.sha1(self.key).digest()


@dataclass(frozen=True, slots=True)
class BalanceKey(TrieKey):
    """Trie key for a contract's balance of one token."""
    nc_id: bytes
    token_uid: bytes

    def __bytes__(self) -> bytes:
        return _Tag.BALANCE.value + self.token_uid


@dataclass(slots=True, frozen=True, kw_only=True)
class Balance:
    """
    The balance of a token in the storage, which includes its value (amount of
    tokens) and the stored authorities. This class is immutable and therefore
    suitable to be used externally.
    """
    value: int
    can_mint: bool
    can_melt: bool

    def to_mutable(self) -> 'MutableBalance':
        """Return a mutable copy of this balance."""
        return MutableBalance(
            value=self.value,
            can_mint=self.can_mint,
            can_melt=self.can_melt,
        )


@dataclass(slots=True, kw_only=True)
class MutableBalance:
    """
    Mutable counterpart of `Balance`, only suitable for use inside
    NCContractStorage and its subclasses.
    """
    value: int
    can_mint: bool
    can_melt: bool

    def grant_authorities(self, *, grant_mint: bool, grant_melt: bool) -> None:
        """Grant the requested authorities in place."""
        self.can_mint = self.can_mint or grant_mint
        self.can_melt = self.can_melt or grant_melt

    def revoke_authorities(self, *, revoke_mint: bool, revoke_melt: bool) -> None:
        """Revoke the requested authorities in place."""
        self.can_mint = self.can_mint and not revoke_mint
        self.can_melt = self.can_melt and not revoke_melt

    @staticmethod
    def get_default() -> 'MutableBalance':
        """Return the default empty balance."""
        return MutableBalance(value=0, can_mint=False, can_melt=False)

    def to_immutable(self) -> Balance:
        """Return an immutable copy of this balance."""
        return Balance(
            value=self.value,
            can_mint=self.can_mint,
            can_melt=self.can_melt,
        )


_BALANCE_NC_TYPE: NCType[MutableBalance] = make_dataclass_nc_type(MutableBalance)


@dataclass(frozen=True, slots=True)
class MetadataKey(TrieKey):
    """Trie key for contract metadata; the raw key is hashed for fixed length."""
    nc_id: bytes
    key: bytes

    def __bytes__(self) -> bytes:
        return _Tag.METADATA.value + hashlib.sha1(self.key).digest()


_BLUEPRINT_ID_KEY = b'blueprint_id'


class NCContractStorage:
    """This is the storage used by NanoContracts.

    This implementation works for both memory and rocksdb backends.
    (The method definitions continue past this chunk and are unchanged here.)
    """
+ + This implementation works for both memory and rocksdb backends.""" + + def __init__(self, *, trie: PatriciaTrie, nc_id: VertexId, token_proxy: TokenProxy) -> None: + # State (balances, metadata and attributes) + self._trie: PatriciaTrie = trie + + # Nano contract id + self.nc_id = nc_id + + # Flag to check whether any change or commit can be executed. + self.is_locked = False + + self._token_proxy = token_proxy + + def has_token(self, token_id: TokenUid) -> bool: + """Return True if token_id exists in the current block.""" + return self._token_proxy.has_token(token_id) + + def create_token(self, token_id: TokenUid, token_name: str, token_symbol: str) -> None: + """Create a new token in the current block.""" + self._token_proxy.create_token(token_id, token_name, token_symbol) + + def lock(self) -> None: + """Lock the storage for changes or commits.""" + self.is_locked = True + + def unlock(self) -> None: + """Unlock the storage.""" + self.is_locked = False + + def check_if_locked(self) -> None: + """Raise a runtime error if the wallet is locked.""" + if self.is_locked: + raise RuntimeError('you cannot modify or commit if the storage is locked') + + def _serialize(self, obj: T | DeletedKeyType, nc_type: NCType[T] | None) -> bytes: + """Serialize a obj to be stored on the trie.""" + serializer = Serializer.build_bytes_serializer() + if nc_type is None: + assert obj is DeletedKey, 'nc_type=None must only be used when obj=DeletedKey' + assert not isinstance(nc_type, MaybeDeletedNCType), 'nested MaybeDeletedNCType' + MaybeDeletedNCType(nc_type).serialize(serializer, obj) + return bytes(serializer.finalize()) + + def _deserialize(self, content: bytes, nc_type: NCType[T]) -> T | DeletedKeyType: + """Deserialize a obj stored on the trie.""" + deserializer = Deserializer.build_bytes_deserializer(content) + assert not isinstance(nc_type, MaybeDeletedNCType), 'nested MaybeDeletedNCType' + obj = MaybeDeletedNCType(nc_type).deserialize(deserializer) + if isinstance(obj, 
DeletedKeyType): + return DeletedKey + return obj + + def _trie_has_key(self, trie_key: TrieKey) -> bool: + """Returns True if trie-key exists and is not deleted.""" + try: + value_bytes = self._trie.get(bytes(trie_key)) + except KeyError: + return False + if MaybeDeletedNCType.is_deleted_key(value_bytes): + return False + return True + + def _trie_get_obj(self, trie_key: TrieKey, nc_type: NCType[T], *, default: D = _NOT_PROVIDED) -> T | D: + """Internal method that gets the object stored at a given trie-key.""" + obj: T | DeletedKeyType + key_bytes = bytes(trie_key) + try: + content = self._trie.get(key_bytes) + except KeyError: + obj = DeletedKey + else: + # XXX: extra variable used so mypy can infer the correct type + obj_t = self._deserialize(content, nc_type) + obj = obj_t + if obj is DeletedKey: + if default is _NOT_PROVIDED: + raise KeyError(f'trie_key={key_bytes!r}') + return default + assert not isinstance(obj, DeletedKeyType) + return obj + + def _trie_update(self, trie_key: TrieKey, nc_type: NCType[T] | None, obj: T | DeletedKeyType) -> None: + """Internal method that updates the object stored at a given trie-key + + For convenience `nc_type=None` is accepted when `obj=DeletedKey`, since it doesn't affect the serialization, so + knowing the actual NCType isn't needed. + """ + content = self._serialize(obj, nc_type) + self._trie.update(bytes(trie_key), content) + + def _to_attr_key(self, key: bytes) -> AttrKey: + """Return the actual key used in the storage.""" + assert isinstance(key, bytes) + return AttrKey(self.nc_id, key) + + def get_obj(self, key: bytes, nc_type: NCType[T], *, default: D = _NOT_PROVIDED) -> T | D: + """Return the object stored at the given `key`, deserialized with the given NCType. + + XXX: using a different NCType to deserialize than was used to serialize can result in successful + deserialization and cause silent errors. + + It raises KeyError if key is not found and a default is not provided. 
+ """ + obj_key = self._to_attr_key(key) + try: + obj = self._trie_get_obj(obj_key, nc_type, default=default) + except KeyError as e: + raise KeyError(f'key={key!r} key_bytes={bytes(obj_key)!r}') from e + return obj + + def put_obj(self, key: bytes, nc_type: NCType[T], obj: T) -> None: + """Store the `object` for the provided `key` serialized with the given NCType. + """ + self.check_if_locked() + obj_key = self._to_attr_key(key) + self._trie_update(obj_key, nc_type, obj) + + def del_obj(self, key: bytes) -> None: + """Delete `key` from storage. + """ + self.check_if_locked() + obj_key = self._to_attr_key(key) + self._trie_update(obj_key, None, DeletedKey) + + def has_obj(self, key: bytes) -> bool: + """whether an object with the given `key` exists in the storage, also False if the object was deleted.""" + obj_key = self._to_attr_key(key) + return self._trie_has_key(obj_key) + + def _get_metadata(self, key: bytes) -> bytes: + """Return the metadata stored at the given key.""" + metadata_key = MetadataKey(self.nc_id, key) + return self._trie_get_obj(metadata_key, _BYTES_NC_TYPE) + + def _put_metadata(self, key: bytes, metadata_bytes: bytes) -> None: + """Store a new metadata at the given key.""" + metadata_key = MetadataKey(self.nc_id, key) + self._trie_update(metadata_key, _BYTES_NC_TYPE, metadata_bytes) + + def get_blueprint_id(self) -> BlueprintId: + """Return the blueprint id of the contract.""" + return BlueprintId(VertexId(self._get_metadata(_BLUEPRINT_ID_KEY))) + + def set_blueprint_id(self, blueprint_id: BlueprintId, /) -> None: + """Set a new blueprint id for the contract.""" + self.check_if_locked() + return self._put_metadata(_BLUEPRINT_ID_KEY, blueprint_id) + + def get_balance(self, token_uid: bytes) -> Balance: + """Return the contract balance for a token.""" + return self._get_mutable_balance(token_uid).to_immutable() + + def _get_mutable_balance(self, token_uid: bytes) -> MutableBalance: + """Return the mutable balance for a token. 
For internal use only.""" + balance_key = BalanceKey(self.nc_id, TokenUid(token_uid)) + balance = self._trie_get_obj(balance_key, _BALANCE_NC_TYPE, default=MutableBalance.get_default()) + assert isinstance(balance, MutableBalance) + return balance + + def get_all_balances(self) -> dict[BalanceKey, Balance]: + """Return the contract balances of all tokens.""" + balances: dict[BalanceKey, Balance] = {} + balance_tag = self._trie._encode_key(_Tag.BALANCE.value) + + node = self._trie._find_nearest_node(balance_tag) + if node.key.startswith(balance_tag): + balance_root = node + else: + for prefix, child_id in node.children.items(): + child = self._trie.get_node(child_id) + if child.key.startswith(balance_tag): + balance_root = child + break + else: + # No balance found. + return balances + + for node, _, is_leaf in self._trie.iter_dfs(node=balance_root): + if node.content is None: + # Skip all nodes with no content. + continue + # Found a token. + assert node.content is not None + balance = self._deserialize(node.content, _BALANCE_NC_TYPE) + assert isinstance(balance, MutableBalance) + token_uid = TokenUid(self._trie._decode_key(node.key)[1:]) + key = BalanceKey(self.nc_id, token_uid) + balances[key] = balance.to_immutable() + return balances + + def add_balance(self, token_uid: bytes, amount: int) -> None: + """Change the contract balance value for a token. The amount will be added to the previous balance value. 
+ + Note that the provided `amount` might be negative, but not the result.""" + self.check_if_locked() + balance_key = BalanceKey(self.nc_id, TokenUid(token_uid)) + balance = self._trie_get_obj(balance_key, _BALANCE_NC_TYPE, default=MutableBalance.get_default()) + assert isinstance(balance, MutableBalance) + balance.value += amount + assert balance.value >= 0, f'balance cannot be negative: {balance.value}' + self._trie_update(balance_key, _BALANCE_NC_TYPE, balance) + + def grant_authorities(self, token_uid: bytes, *, grant_mint: bool, grant_melt: bool) -> None: + """Grant authorities to the contract for a token.""" + assert token_uid != HATHOR_TOKEN_UID + self.check_if_locked() + balance_key = BalanceKey(self.nc_id, TokenUid(token_uid)) + balance = self._trie_get_obj(balance_key, _BALANCE_NC_TYPE, default=MutableBalance.get_default()) + assert isinstance(balance, MutableBalance) + balance.grant_authorities(grant_mint=grant_mint, grant_melt=grant_melt) + self._trie_update(balance_key, _BALANCE_NC_TYPE, balance) + + def revoke_authorities(self, token_uid: bytes, *, revoke_mint: bool, revoke_melt: bool) -> None: + """Revoke authorities from the contract for a token.""" + assert token_uid != HATHOR_TOKEN_UID + self.check_if_locked() + balance_key = BalanceKey(self.nc_id, TokenUid(token_uid)) + balance = self._trie_get_obj(balance_key, _BALANCE_NC_TYPE, default=MutableBalance.get_default()) + assert isinstance(balance, MutableBalance) + balance.revoke_authorities(revoke_mint=revoke_mint, revoke_melt=revoke_melt) + self._trie_update(balance_key, _BALANCE_NC_TYPE, balance) + + def commit(self) -> None: + """Flush all local changes to the storage.""" + self.check_if_locked() + self._trie.commit() + + def get_root_id(self) -> bytes: + """Return the current merkle root id of the trie.""" + return self._trie.root.id diff --git a/hathor/nanocontracts/storage/factory.py b/hathor/nanocontracts/storage/factory.py new file mode 100644 index 000000000..e4430b8f6 --- /dev/null +++ 
b/hathor/nanocontracts/storage/factory.py @@ -0,0 +1,83 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +from abc import ABC +from typing import TYPE_CHECKING, Optional + +from hathor.nanocontracts.storage.backends import MemoryNodeTrieStore, NodeTrieStore, RocksDBNodeTrieStore +from hathor.nanocontracts.storage.block_storage import NCBlockStorage + +if TYPE_CHECKING: + from hathor.nanocontracts.storage.patricia_trie import NodeId, PatriciaTrie + from hathor.storage import RocksDBStorage + from hathor.transaction.block import Block + + +class NCStorageFactory(ABC): + _store: 'NodeTrieStore' + + @staticmethod + def bytes_to_node_id(node_id: Optional[bytes]) -> Optional['NodeId']: + from hathor.nanocontracts.storage.patricia_trie import NodeId + if node_id is None: + return node_id + return NodeId(node_id) + + def _get_trie(self, root_id: Optional[bytes]) -> 'PatriciaTrie': + """Return a PatriciaTrie object with a given root.""" + from hathor.nanocontracts.storage.patricia_trie import PatriciaTrie + trie = PatriciaTrie(self._store, root_id=self.bytes_to_node_id(root_id)) + return trie + + def get_block_storage_from_block(self, block: Block) -> NCBlockStorage: + """Return a block storage. 
If the block is genesis, it will return an empty block storage.""" + meta = block.get_metadata() + if block.is_genesis: + assert meta.nc_block_root_id is None + return self.get_empty_block_storage() + assert meta.nc_block_root_id is not None + return self.get_block_storage(meta.nc_block_root_id) + + def get_block_storage(self, block_root_id: bytes) -> NCBlockStorage: + """Return a non-empty block storage.""" + trie = self._get_trie(block_root_id) + return NCBlockStorage(trie) + + def get_empty_block_storage(self) -> NCBlockStorage: + """Create an empty block storage.""" + trie = self._get_trie(None) + return NCBlockStorage(trie) + + +class NCMemoryStorageFactory(NCStorageFactory): + """Factory to create a memory storage for a contract. + + As it is a memory storage, the factory keeps all contract stored data on + its attribute `self.data`. + """ + + def __init__(self) -> None: + # This attribute stores data from all contracts. + self._store = MemoryNodeTrieStore() + + +class NCRocksDBStorageFactory(NCStorageFactory): + """Factory to create a RocksDB storage for a contract. + """ + + def __init__(self, rocksdb_storage: 'RocksDBStorage') -> None: + # This store keeps data from all contracts. + self._store = RocksDBNodeTrieStore(rocksdb_storage) diff --git a/hathor/nanocontracts/storage/maybedeleted_nc_type.py b/hathor/nanocontracts/storage/maybedeleted_nc_type.py new file mode 100644 index 000000000..efcaf1676 --- /dev/null +++ b/hathor/nanocontracts/storage/maybedeleted_nc_type.py @@ -0,0 +1,81 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+from typing import TypeVar
+
+from typing_extensions import override
+
+from hathor.nanocontracts.nc_types import NCType
+from hathor.nanocontracts.storage.types import DeletedKey, DeletedKeyType
+from hathor.serialization import Deserializer, Serializer
+from hathor.serialization.encoding.bool import decode_bool
+
+T = TypeVar('T')
+
+
+class MaybeDeletedNCType(NCType[T | DeletedKeyType]):
+    """ Used internally to wrap a NCType or DeletedKey
+    """
+
+    __slots__ = ('_value',)
+    _value: NCType[T] | None
+
+    def __init__(self, wrapped_value: NCType[T] | None) -> None:
+        self._value = wrapped_value
+
+    @classmethod
+    def is_deleted_key(cls, data: bytes) -> bool:
+        """ Shortcut to check if deserializing data would result in a `DeletedKey`.
+
+        It is possible to do that because of the serialization layout, it basically boils down to checking the first
+        byte of data, this is done indirectly by using the same implementation that `MaybeDeletedNCType.deserialize`
+        uses.
+ """ + deserializer = Deserializer.build_bytes_deserializer(data) + has_value = decode_bool(deserializer) + return not has_value + + @override + def _check_value(self, value: T | DeletedKeyType, /, *, deep: bool) -> None: + if isinstance(value, DeletedKeyType): + assert value is DeletedKey + return + if deep: + if self._value is None: + raise ValueError('missing inner NCType') + self._value._check_value(value, deep=deep) + + @override + def _serialize(self, serializer: Serializer, value: T | DeletedKeyType, /) -> None: + from hathor.serialization.encoding.bool import encode_bool + if value is DeletedKey: + encode_bool(serializer, False) + else: + if self._value is None: + raise ValueError('missing inner NCType') + assert not isinstance(value, DeletedKeyType) + encode_bool(serializer, True) + self._value.serialize(serializer, value) + + @override + def _deserialize(self, deserializer: Deserializer, /) -> T | DeletedKeyType: + has_value = decode_bool(deserializer) + if has_value: + if self._value is None: + raise ValueError('missing inner NCType') + return self._value.deserialize(deserializer) + else: + return DeletedKey diff --git a/hathor/nanocontracts/storage/node_nc_type.py b/hathor/nanocontracts/storage/node_nc_type.py new file mode 100644 index 000000000..dd9a74ee3 --- /dev/null +++ b/hathor/nanocontracts/storage/node_nc_type.py @@ -0,0 +1,81 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import annotations + +from typing import TYPE_CHECKING + +from typing_extensions import override + +from hathor.nanocontracts.nc_types import ( + BytesLikeNCType, + BytesNCType, + DictNCType, + NCType, + OptionalNCType, + VarUint32NCType, +) +from hathor.serialization import Deserializer, Serializer + +if TYPE_CHECKING: + from hathor.nanocontracts.storage.patricia_trie import Node, NodeId + + +class NodeNCType(NCType['Node']): + """ Used internally to (de)serialize a Node into/from the database. + """ + + __slots__ = ('_key', '_length', '_content', '_children', '_id') + _key: NCType[bytes] + _length: NCType[int] + _content: NCType[bytes | None] + _children: NCType[dict[bytes, NodeId]] + # XXX: id is not optional, we're indicating that only nodes with id can be stored + _id: NCType[NodeId] + + def __init__(self) -> None: + from hathor.nanocontracts.storage.patricia_trie import NodeId + self._key = BytesNCType() + self._length = VarUint32NCType() + self._content = OptionalNCType(BytesNCType()) + # XXX: ignores because mypy can't figure out that BytesLikeNCType[NodeId] provides a NCType[NodeId] + self._children = DictNCType(BytesNCType(), BytesLikeNCType(NodeId)) # type: ignore[assignment] + self._id = BytesLikeNCType(NodeId) + + @override + def _check_value(self, value: Node, /, *, deep: bool) -> None: + from hathor.nanocontracts.storage.patricia_trie import Node + if not isinstance(value, Node): + raise TypeError('expected Node class') + + @override + def _serialize(self, serializer: Serializer, node: Node, /) -> None: + # XXX: the order is important, must be the same between de/serialization + self._key.serialize(serializer, node.key) + self._length.serialize(serializer, node.length) + self._content.serialize(serializer, node.content) + self._children.serialize(serializer, node.children) + self._id.serialize(serializer, node.id) + + @override + def _deserialize(self, deserializer: Deserializer, /) -> Node: + from 
hathor.nanocontracts.storage.patricia_trie import DictChildren, Node + + # XXX: the order is important, must be the same between de/serialization + key = self._key.deserialize(deserializer) + length = self._length.deserialize(deserializer) + content = self._content.deserialize(deserializer) + children = DictChildren(self._children.deserialize(deserializer)) + id_ = self._id.deserialize(deserializer) + return Node(key=key, length=length, content=content, children=children, _id=id_) diff --git a/hathor/nanocontracts/storage/patricia_trie.py b/hathor/nanocontracts/storage/patricia_trie.py new file mode 100644 index 000000000..edb1e0b64 --- /dev/null +++ b/hathor/nanocontracts/storage/patricia_trie.py @@ -0,0 +1,389 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import hashlib +from dataclasses import dataclass, field +from itertools import chain +from typing import Iterable, NamedTuple, NewType, Optional + +from hathor.nanocontracts.storage.backends import NodeTrieStore + +NodeId = NewType('NodeId', bytes) + + +class DictChildren(dict[bytes, NodeId]): + """Data structure to store children of tree nodes.""" + def find_prefix(self, a: bytes) -> Optional[tuple[bytes, NodeId]]: + """Find the key that is a prefix of `a`.""" + # TODO Optimize search. 
+ for key, node_id in self.items(): + if a.startswith(key): + return key, node_id + return None + + def copy(self): + """Return a copy of itself.""" + return DictChildren(self) + + +@dataclass(kw_only=True, slots=True) +class Node: + """This is a node in the Patricia trie. + + Each node can carry an object or not. If a node does not carry an object, its key has never been directly added to + the trie but it was created because some keys have the same prefix. + + Note: We might be able to remove the length. + """ + + key: bytes + length: int + content: Optional[bytes] = None + children: DictChildren = field(default_factory=DictChildren) + _id: Optional[NodeId] = None + + @property + def id(self) -> NodeId: + assert self._id is not None + return self._id + + def copy(self, content: Optional[bytes] = None, children: Optional[DictChildren] = None) -> 'Node': + """Generate a copy of this node except by the id field.""" + content = content if content is not None else self.content + children = children if children is not None else self.children.copy() + return Node(key=self.key, length=self.length, content=content, children=children) + + def calculate_id(self) -> NodeId: + """Calculate a merkle hash to serve as node id. + + This method assumes that all children already have their ids calculated. + """ + h = hashlib.sha1() + h.update(self.key) + if self.content is not None: + h.update(self.content) + sorted_child_ids = sorted(list(self.children.values())) + for child_id in sorted_child_ids: + h.update(child_id) + return NodeId(h.digest()) + + def update_id(self) -> None: + """Update node id.""" + assert self._id is None + self._id = self.calculate_id() + + +class IterDFSNode(NamedTuple): + """Item yielded by `PatriciaTrie.iter_dfs()`.""" + node: Node + height: int + is_leaf: bool + + +class PatriciaTrie: + """This object manages one or more Patricia tries; each Patricia trie is a compressed radix trie. + + All nodes are immutable. 
So every update will create a new path of nodes from leaves to a new root. + + - The tree structure must be the same regardless of the order the items are added. + """ + + __slots__ = ('_local_changes', '_db', 'root') + + def __init__(self, store: NodeTrieStore, *, root_id: Optional[NodeId] = None) -> None: + self._local_changes: dict[NodeId, Node] = {} + self._db = store + if root_id is None: + self.root: Node = Node(key=b'', length=0) + self.root.update_id() + self._db[self.root.id] = self.root + else: + self.root = self._db[root_id] + assert self.root.id == root_id + + def get_store(self) -> NodeTrieStore: + return self._db + + def commit(self) -> None: + """Flush all local changes from self.root to the database. All other nodes not accessed from self.root + will be discarded. + + This method should be called after all changes have been made to reduce the total number of nodes. + """ + self._commit_dfs(self.root) + self._local_changes = {} + + def _commit_dfs(self, node: Node) -> None: + """Auxiliary method to run a dfs from self.root and flush local changes to the database.""" + self._add_to_db_or_assert(node) + for child_id in node.children.values(): + child = self._local_changes.get(child_id, None) + if child is not None: + self._commit_dfs(child) + else: + assert child_id in self._db + + def _add_to_db_or_assert(self, node: Node) -> None: + """Auxiliary method to either add to the database or check consistency.""" + if node.id in self._db: + assert self._db[node.id] == node + else: + self._db[node.id] = node + + def rollback(self) -> None: + """Discard all local changes.""" + self._local_changes = {} + + def is_dirty(self) -> bool: + """Check if there is any pending local change.""" + return bool(self._local_changes) + + def get_node(self, node_id: NodeId) -> Node: + """Return a node from local changes or the database.""" + if node_id in self._local_changes: + return self._local_changes[node_id] + return self._db[node_id] + + def iter_dfs(self, *, node: 
Optional[Node] = None) -> Iterable[IterDFSNode]: + """Iterate from a node in a depth-first search.""" + if node is None: + node = self.root + assert node is not None + yield from self._iter_dfs(node=node, depth=0) + + def _iter_dfs(self, *, node: Node, depth: int) -> Iterable[IterDFSNode]: + """Iterate from a node in a depth-first search.""" + is_leaf = bool(not node.children) + yield IterDFSNode(node, depth, is_leaf) + for _, child_id in node.children.items(): + child = self.get_node(child_id) + yield from self._iter_dfs(node=child, depth=depth + 1) + + def _find_nearest_node(self, + key: bytes, + *, + root_id: Optional[NodeId] = None, + log_path: Optional[list[tuple[bytes, Node]]] = None) -> Node: + """Find the nearest node in the trie starting from root_id. + + Notice that it does not have to be a match. The nearest node will share the longest common + prefix with the provided key. + """ + + node: Node + if root_id is None: + node = self.root + else: + node = self.get_node(root_id) + + last_match: bytes = b'' + + while True: + if log_path is not None: + log_path.append((last_match, node)) + + if node.key == key: + return node + + suffix = key[node.length:] + match = node.children.find_prefix(suffix) + if match is not None: + last_match, next_node_id = match + else: + return node + + node = self.get_node(next_node_id) + + @staticmethod + def _find_longest_common_prefix(a: bytes, b: bytes) -> int: + """Return the index of the longest common prefix between `a` and `b`. + + If a and b does not share any prefix, returns -1. + Otherwise, return an integer in the range [0, min(|a|, |b|) - 1]. 
+ """ + n = min(len(a), len(b)) + for i in range(n): + if a[i] != b[i]: + return i - 1 + return n - 1 + + def print_dfs(self, node: Optional[Node] = None, *, depth: int = 0) -> None: + if node is None: + node = self.root + + prefix = ' ' * depth + print(f'{prefix}key: {node.key!r}') + print(f'{prefix}length: {node.length}') + print(f'{prefix}content: {node.content!r}') + print(f'{prefix}n_children: {len(node.children)}') + print(f'{prefix}id: {node.id.hex()}') + print() + for k, child_id in node.children.items(): + print(f' {prefix}--- {k!r} ---') + child = self.get_node(child_id) + self.print_dfs(child, depth=depth + 1) + + def _build_path(self, log_path: list[tuple[bytes, Node]], new_nodes: list[tuple[bytes, Node]]) -> None: + """Build a new path of nodes from the new nodes being added and the current nodes at the trie.""" + prev_suffix: bytes | None = None + + prev_suffix, _ = new_nodes[0] + log_path_copy: list[tuple[bytes, Node]] = [] + for suffix, node in log_path[::-1]: + new_node = node.copy() + assert prev_suffix is not None + del new_node.children[prev_suffix] + log_path_copy.append((suffix, new_node)) + prev_suffix = suffix + + prev: Node | None = None + prev_suffix = None + for suffix, node in chain(new_nodes[::-1], log_path_copy): + if prev is not None: + assert prev.id is not None + assert prev_suffix is not None + node.children[prev_suffix] = prev.id + node.update_id() + self._local_changes[node.id] = node + prev = node + prev_suffix = suffix + + assert prev is not None + self.root = prev + + def _encode_key(self, key: bytes) -> bytes: + """Encode key for internal use. + + This encoding mechanism is utilized to limit the maximum number of children a node can have.""" + return key.hex().encode('ascii') + + def _decode_key(self, key: bytes) -> bytes: + """Decode key from internal format to the provided one. + + During the trie operation, keys are split and they might not be a valid hex string. + In this cases, we append a '0' at the end. 
+ """ + if len(key) % 2 == 1: + key += b'0' + return bytes.fromhex(key.decode('ascii')) + + def _update(self, key: bytes, content: bytes) -> None: + """Internal method to update a key. + + This method never updates a node. It actually copies the node and creates a new path + from that node to the root. + """ + # The new_nodes carries the nodes that currently do not exist in the store. + # These nodes still do not have an id. Their ids will be calculated in the _build_path() method. + new_nodes: list[tuple[bytes, Node]] = [] + + # The log_path is used to backtrack the nearest node to the root. These nodes will be copied in + # the _build_path() method. + log_path: list[tuple[bytes, Node]] = [] + + # First, search for the nearest node to `key`. It either matches the key or is a prefix of the key. + parent = self._find_nearest_node(key, log_path=log_path) + # The last item in the log_path is equal to the returned node. We discard it because the parent + # will be added to the `new_nodes` later. + parent_match, _ = log_path.pop() + + if parent.key == key: + # If the nearest node stores `key`, then we will just copy it and build a new path up to the root. + new_nodes.append((parent_match, parent.copy(content=content))) + self._build_path(log_path, new_nodes) + return + + # If this point is reached, then `parent.key` is a prefix of `key`. So we have to check whether + # any of parent's children shares a prefix with `key` too. Notice that at most one children can + # share a prefix with `key`. + # TODO Optimize this search. + suffix = key[parent.length:] + for k, _v in parent.children.items(): + idx = self._find_longest_common_prefix(suffix, k) + if idx < 0: + # No share with `key`. So skip it. + continue + + # Found the child the shares a prefix with `key`. So we can stop the search. + # Now we have to add a "split node" between the parent and its child. 
+ # + # Before: parent -> child + # After: parent -> split -> child + common_key = key[:parent.length + idx + 1] + common_key_suffix = suffix[:idx + 1] + + split = Node( + key=common_key, + length=len(common_key), + ) + split.children[k[idx + 1:]] = _v + + parent_children_copy = parent.children.copy() + del parent_children_copy[k] + new_nodes.append((parent_match, parent.copy(children=parent_children_copy))) + + # Either the split node's key equals to `key` or not. + if split.key == key: + # If they are equal, the split node will store the object and we are done. + split.content = content + new_nodes.append((common_key_suffix, split)) + self._build_path(log_path, new_nodes) + return + + # Otherwise, the split node will be the parent of the new node that will be created + # to store the object. + parent = split + parent_match = common_key_suffix + break + + # Finally, create the new node that will store the object. + assert parent.key != key + suffix = key[parent.length:] + child = Node( + key=key, + length=len(key), + content=content, + ) + new_nodes.append((parent_match, parent.copy())) + new_nodes.append((suffix, child)) + self._build_path(log_path, new_nodes) + + def _get(self, key: bytes, *, root_id: Optional[NodeId] = None) -> bytes: + """Internal method to get the object-bytes of a key.""" + if key == b'': + raise KeyError('key cannot be empty') + node = self._find_nearest_node(key, root_id=root_id) + if node.key != key: + raise KeyError + if node.content is None: + raise KeyError + return node.content + + def update(self, key: bytes, content: bytes) -> None: + """Update the object of a key. 
This method might change the root of the trie.""" + real_key = self._encode_key(key) + return self._update(real_key, content) + + def get(self, key: bytes, *, root_id: Optional[NodeId] = None) -> bytes: + """Return the object of a key.""" + real_key = self._encode_key(key) + return self._get(real_key, root_id=root_id) + + def has_key(self, key: bytes, *, root_id: Optional[NodeId] = None) -> bool: + """Return true if the key exists.""" + try: + self.get(key, root_id=root_id) + except KeyError: + return False + return True diff --git a/hathor/nanocontracts/storage/token_proxy.py b/hathor/nanocontracts/storage/token_proxy.py new file mode 100644 index 000000000..107362e3a --- /dev/null +++ b/hathor/nanocontracts/storage/token_proxy.py @@ -0,0 +1,36 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from hathor.nanocontracts.storage.block_storage import NCBlockStorage + from hathor.nanocontracts.types import TokenUid + + +class TokenProxy: + """A proxy used to limit access to only the tokens method of a block storage. 
+ """ + def __init__(self, block_storage: NCBlockStorage) -> None: + self.__block_storage = block_storage + + def has_token(self, token_id: TokenUid) -> bool: + """Proxy to block_storage.has_token().""" + return self.__block_storage.has_token(token_id) + + def create_token(self, token_id: TokenUid, token_name: str, token_symbol: str) -> None: + """Proxy to block_storage.create_token().""" + self.__block_storage.create_token(token_id, token_name, token_symbol) diff --git a/hathor/nanocontracts/storage/types.py b/hathor/nanocontracts/storage/types.py new file mode 100644 index 000000000..4df166c2b --- /dev/null +++ b/hathor/nanocontracts/storage/types.py @@ -0,0 +1,27 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Any + + +class DeletedKeyType: + pass + + +# Placeholder to mark a key as deleted in a dict. +DeletedKey = DeletedKeyType() + +# Sentinel value to differentiate where a user has provided a default value or not. +# Since _NOT_PROVIDED is a unique object, it is guaranteed not to be equal to any other value. +_NOT_PROVIDED: Any = object() diff --git a/hathor/nanocontracts/types.py b/hathor/nanocontracts/types.py new file mode 100644 index 000000000..f0876edd4 --- /dev/null +++ b/hathor/nanocontracts/types.py @@ -0,0 +1,434 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +import inspect +from dataclasses import dataclass +from enum import Enum, unique +from typing import Any, Callable, Generic, NewType, TypeAlias, TypeVar + +from typing_extensions import override + +from hathor.nanocontracts.blueprint_syntax_validation import ( + validate_has_ctx_arg, + validate_has_not_ctx_arg, + validate_has_self_arg, + validate_method_types, +) +from hathor.nanocontracts.exception import BlueprintSyntaxError, NCSerializationError +from hathor.transaction.util import bytes_to_int, int_to_bytes +from hathor.utils.typing import InnerTypeMixin + +# Types to be used by blueprints. +Address = NewType('Address', bytes) +Amount = NewType('Amount', int) +Timestamp = NewType('Timestamp', int) +TokenUid = NewType('TokenUid', bytes) +TxOutputScript = NewType('TxOutputScript', bytes) +VertexId = NewType('VertexId', bytes) +BlueprintId = NewType('BlueprintId', VertexId) +ContractId = NewType('ContractId', VertexId) + +T = TypeVar('T') + +NC_INITIALIZE_METHOD: str = 'initialize' +NC_FALLBACK_METHOD: str = 'fallback' + +NC_ALLOWED_ACTIONS_ATTR = '__nc_allowed_actions' +NC_METHOD_TYPE_ATTR: str = '__nc_method_type' + + +class NCMethodType(Enum): + PUBLIC = 'public' + VIEW = 'view' + FALLBACK = 'fallback' + + +def blueprint_id_from_bytes(data: bytes) -> BlueprintId: + """Create a BlueprintId from a bytes object.""" + return BlueprintId(VertexId(data)) + + +class RawSignedData(InnerTypeMixin[T], Generic[T]): + """A wrapper class to sign data. + + T must be serializable. 
+ """ + + def __init__(self, data: T, script_input: bytes) -> None: + from hathor.nanocontracts.nc_types import make_nc_type_for_return_type as make_nc_type + self.data = data + self.script_input = script_input + self.__nc_type = make_nc_type(self.__inner_type__) + + def __eq__(self, other): + if not isinstance(other, RawSignedData): + return False + if self.data != other.data: + return False + if self.script_input != other.script_input: + return False + return True + + def get_data_bytes(self) -> bytes: + """Return the serialized data.""" + return self.__nc_type.to_bytes(self.data) + + def get_sighash_all_data(self) -> bytes: + """Workaround to be able to pass `self` for ScriptExtras. See the method `checksig`.""" + return self.get_data_bytes() + + def checksig(self, script: bytes) -> bool: + """Check if `self.script_input` satisfies the provided script.""" + from hathor.transaction.exceptions import ScriptError + from hathor.transaction.scripts import ScriptExtras + from hathor.transaction.scripts.execute import execute_eval + full_data = self.script_input + script + log: list[str] = [] + extras = ScriptExtras(tx=self) # type: ignore[arg-type] + try: + execute_eval(full_data, log, extras) + except ScriptError: + return False + else: + return True + + +class SignedData(InnerTypeMixin[T], Generic[T]): + def __init__(self, data: T, script_input: bytes) -> None: + self.data = data + self.script_input = script_input + + def __eq__(self, other): + if not isinstance(other, SignedData): + return False + if self.data != other.data: + return False + if self.script_input != other.script_input: + return False + return True + + def _get_raw_signed_data(self, contract_id: ContractId) -> RawSignedData: + # XXX: for some reason mypy doesn't recognize that self.__inner_type__ is defined even though it should + raw_type: type = tuple[ContractId, self.__inner_type__] # type: ignore[name-defined] + raw_data = (contract_id, self.data) + return RawSignedData[raw_type](raw_data, 
self.script_input) # type: ignore[valid-type] + + def get_data_bytes(self, contract_id: ContractId) -> bytes: + """Return the serialized data.""" + raw_signed_data = self._get_raw_signed_data(contract_id) + return raw_signed_data.get_data_bytes() + + def checksig(self, contract_id: ContractId, script: bytes) -> bool: + """Check if script_input satisfies the provided script.""" + raw_signed_data = self._get_raw_signed_data(contract_id) + return raw_signed_data.checksig(script) + + +def _set_method_type(fn: Callable, method_type: NCMethodType) -> None: + if hasattr(fn, NC_METHOD_TYPE_ATTR): + raise BlueprintSyntaxError(f'method must be annotated with at most one method type: `{fn.__name__}()`') + setattr(fn, NC_METHOD_TYPE_ATTR, method_type) + + +def _create_decorator_with_allowed_actions( + *, + decorator_body: Callable[[Callable], None], + maybe_fn: Callable | None, + allow_deposit: bool | None, + allow_withdrawal: bool | None, + allow_grant_authority: bool | None, + allow_acquire_authority: bool | None, + allow_actions: list[NCActionType] | None, +) -> Callable: + """Internal utility to create a decorator that sets allowed actions.""" + flags = { + NCActionType.DEPOSIT: allow_deposit, + NCActionType.WITHDRAWAL: allow_withdrawal, + NCActionType.GRANT_AUTHORITY: allow_grant_authority, + NCActionType.ACQUIRE_AUTHORITY: allow_acquire_authority, + } + + def decorator(fn: Callable) -> Callable: + if allow_actions is not None and any(flag is not None for flag in flags.values()): + raise BlueprintSyntaxError(f'use only one of `allow_actions` or per-action flags: `{fn.__name__}()`') + + allowed_actions = set(allow_actions) if allow_actions else set() + allowed_actions.update(action for action, flag in flags.items() if flag) + setattr(fn, NC_ALLOWED_ACTIONS_ATTR, allowed_actions) + + decorator_body(fn) + return fn + + if maybe_fn is not None: + return decorator(maybe_fn) + return decorator + + +def public( + maybe_fn: Callable | None = None, + /, + *, + allow_deposit: bool 
| None = None, + allow_withdrawal: bool | None = None, + allow_grant_authority: bool | None = None, + allow_acquire_authority: bool | None = None, + allow_actions: list[NCActionType] | None = None, +) -> Callable: + """Decorator to mark a blueprint method as public.""" + def decorator(fn: Callable) -> None: + annotation_name = 'public' + forbidden_methods = {NC_FALLBACK_METHOD} + _set_method_type(fn, NCMethodType.PUBLIC) + + if fn.__name__ in forbidden_methods: + raise BlueprintSyntaxError(f'`{fn.__name__}` method cannot be annotated with @{annotation_name}') + + validate_has_self_arg(fn, annotation_name) + validate_method_types(fn) + validate_has_ctx_arg(fn, annotation_name) + + return _create_decorator_with_allowed_actions( + decorator_body=decorator, + maybe_fn=maybe_fn, + allow_deposit=allow_deposit, + allow_withdrawal=allow_withdrawal, + allow_grant_authority=allow_grant_authority, + allow_acquire_authority=allow_acquire_authority, + allow_actions=allow_actions, + ) + + +def view(fn: Callable) -> Callable: + """Decorator to mark a blueprint method as view (read-only).""" + annotation_name = 'view' + forbidden_methods = {NC_INITIALIZE_METHOD, NC_FALLBACK_METHOD} + _set_method_type(fn, NCMethodType.VIEW) + + if fn.__name__ in forbidden_methods: + raise BlueprintSyntaxError(f'`{fn.__name__}` method cannot be annotated with @{annotation_name}') + + validate_has_self_arg(fn, annotation_name) + validate_has_not_ctx_arg(fn, annotation_name) + validate_method_types(fn) + return fn + + +def fallback( + maybe_fn: Callable | None = None, + /, + *, + allow_deposit: bool | None = None, + allow_withdrawal: bool | None = None, + allow_grant_authority: bool | None = None, + allow_acquire_authority: bool | None = None, + allow_actions: list[NCActionType] | None = None, +) -> Callable: + """Decorator to mark a blueprint method as fallback. 
The method must also be called `fallback`.""" + def decorator(fn: Callable) -> None: + annotation_name = 'fallback' + _set_method_type(fn, NCMethodType.FALLBACK) + + if fn.__name__ != NC_FALLBACK_METHOD: + raise BlueprintSyntaxError(f'@{annotation_name} method must be called `fallback`: `{fn.__name__}()`') + + validate_has_self_arg(fn, annotation_name) + validate_method_types(fn) + validate_has_ctx_arg(fn, annotation_name) + + arg_spec = inspect.getfullargspec(fn) + msg = f'@{annotation_name} method must have these args: `ctx: Context, method_name: str, nc_args: NCArgs`' + + if len(arg_spec.args) < 4: + raise BlueprintSyntaxError(msg) + + third_arg = arg_spec.args[2] + fourth_arg = arg_spec.args[3] + + if arg_spec.annotations[third_arg] is not str or arg_spec.annotations[fourth_arg] is not NCArgs: + raise BlueprintSyntaxError(msg) + + return _create_decorator_with_allowed_actions( + decorator_body=decorator, + maybe_fn=maybe_fn, + allow_deposit=allow_deposit, + allow_withdrawal=allow_withdrawal, + allow_grant_authority=allow_grant_authority, + allow_acquire_authority=allow_acquire_authority, + allow_actions=allow_actions, + ) + + +@unique +class NCActionType(Enum): + """ + Types of interactions a transaction might have with a contract. + Check the respective dataclasses below for more info. + """ + DEPOSIT = 1 + WITHDRAWAL = 2 + GRANT_AUTHORITY = 3 + ACQUIRE_AUTHORITY = 4 + + def __str__(self) -> str: + return self.name + + def to_bytes(self) -> bytes: + return int_to_bytes(number=self.value, size=1) + + @staticmethod + def from_bytes(data: bytes) -> NCActionType: + return NCActionType(bytes_to_int(data)) + + +@dataclass(slots=True, frozen=True, kw_only=True) +class BaseAction: + """The base dataclass for all NC actions. 
Shouldn't be instantiated directly.""" + token_uid: TokenUid + + @property + def type(self) -> NCActionType: + """The respective NCActionType for each NCAction.""" + action_types: dict[type[BaseAction], NCActionType] = { + NCDepositAction: NCActionType.DEPOSIT, + NCWithdrawalAction: NCActionType.WITHDRAWAL, + NCGrantAuthorityAction: NCActionType.GRANT_AUTHORITY, + NCAcquireAuthorityAction: NCActionType.ACQUIRE_AUTHORITY, + } + + if action_type := action_types.get(type(self)): + return action_type + + raise NotImplementedError(f'unknown action type {type(self)}') + + @property + def name(self) -> str: + """The action name.""" + return str(self.type) + + def to_json(self) -> dict[str, Any]: + """ + Convert this action to a json dict. + + >>> NCDepositAction(token_uid=TokenUid(b'\x01'), amount=123).to_json() + {'type': 'deposit', 'token_uid': '01', 'amount': 123} + >>> NCWithdrawalAction(token_uid=TokenUid(b'\x01'), amount=123).to_json() + {'type': 'withdrawal', 'token_uid': '01', 'amount': 123} + >>> NCGrantAuthorityAction(token_uid=TokenUid(b'\x01'), mint=True, melt=False).to_json() + {'type': 'grant_authority', 'token_uid': '01', 'mint': True, 'melt': False} + >>> NCAcquireAuthorityAction(token_uid=TokenUid(b'\x01'), mint=False, melt=True).to_json() + {'type': 'acquire_authority', 'token_uid': '01', 'mint': False, 'melt': True} + """ + return dict( + type=self.name.lower(), + token_uid=self.token_uid.hex(), + ) + + +@dataclass(slots=True, frozen=True, kw_only=True) +class BaseTokenAction(BaseAction): + """The base dataclass for all token-related NC actions. Shouldn't be instantiated directly.""" + amount: int + + @override + def to_json(self) -> dict[str, Any]: + json_dict = super(BaseTokenAction, self).to_json() + return dict( + **json_dict, + amount=self.amount, + ) + + +@dataclass(slots=True, frozen=True, kw_only=True) +class BaseAuthorityAction(BaseAction): + """The base dataclass for all authority-related NC actions. 
Shouldn't be instantiated directly.""" + mint: bool + melt: bool + + def __post_init__(self) -> None: + """Validate the token uid.""" + from hathor.conf.settings import HATHOR_TOKEN_UID + from hathor.nanocontracts.exception import NCInvalidAction + if self.token_uid == HATHOR_TOKEN_UID: + raise NCInvalidAction(f'{self.name} action cannot be executed on HTR token') + + @override + def to_json(self) -> dict[str, Any]: + json_dict = super(BaseAuthorityAction, self).to_json() + return dict( + **json_dict, + mint=self.mint, + melt=self.melt, + ) + + +@dataclass(slots=True, frozen=True, kw_only=True) +class NCDepositAction(BaseTokenAction): + """Deposit tokens into the contract.""" + + +@dataclass(slots=True, frozen=True, kw_only=True) +class NCWithdrawalAction(BaseTokenAction): + """Withdraw tokens from the contract.""" + + +@dataclass(slots=True, frozen=True, kw_only=True) +class NCGrantAuthorityAction(BaseAuthorityAction): + """Grant an authority to the contract.""" + + +@dataclass(slots=True, frozen=True, kw_only=True) +class NCAcquireAuthorityAction(BaseAuthorityAction): + """ + Acquire an authority stored in a contract to create authority outputs or mint/melt tokens in the tx, + or to store and use in a caller contract. + """ + + +"""A sum type representing all possible nano contract actions.""" +NCAction: TypeAlias = ( + NCDepositAction + | NCWithdrawalAction + | NCGrantAuthorityAction + | NCAcquireAuthorityAction +) + + +@dataclass(slots=True, frozen=True) +class NCRawArgs: + args_bytes: bytes + + def __str__(self) -> str: + return self.args_bytes.hex() + + def __repr__(self) -> str: + return f"NCRawArgs('{str(self)}')" + + def try_parse_as(self, arg_types: tuple[type, ...]) -> tuple[Any, ...] 
| None: + from hathor.nanocontracts.method import ArgsOnly + try: + args_parser = ArgsOnly.from_arg_types(arg_types) + return args_parser.deserialize_args_bytes(self.args_bytes) + except (NCSerializationError, TypeError): + return None + + +@dataclass(slots=True, frozen=True) +class NCParsedArgs: + args: tuple[Any, ...] + kwargs: dict[str, Any] + + +NCArgs: TypeAlias = NCRawArgs | NCParsedArgs diff --git a/hathor/nanocontracts/utils.py b/hathor/nanocontracts/utils.py new file mode 100644 index 000000000..cc681dc40 --- /dev/null +++ b/hathor/nanocontracts/utils.py @@ -0,0 +1,141 @@ +# Copyright 2021 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import annotations + +import hashlib +from types import ModuleType +from typing import Callable + +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric import ec +from pycoin.key.Key import Key as PycoinKey + +from hathor.crypto.util import decode_address, get_address_from_public_key_bytes, get_public_key_bytes_compressed +from hathor.nanocontracts.types import NC_METHOD_TYPE_ATTR, BlueprintId, ContractId, NCMethodType, TokenUid, VertexId +from hathor.transaction.headers import NanoHeader +from hathor.util import not_none + +CHILD_CONTRACT_ID_PREFIX: bytes = b'child-contract' +CHILD_TOKEN_ID_PREFIX: bytes = b'child-token' + + +def is_nc_public_method(method: Callable) -> bool: + """Return True if the method is nc_public.""" + return getattr(method, NC_METHOD_TYPE_ATTR, None) is NCMethodType.PUBLIC + + +def is_nc_view_method(method: Callable) -> bool: + """Return True if the method is nc_view.""" + return getattr(method, NC_METHOD_TYPE_ATTR, None) is NCMethodType.VIEW + + +def is_nc_fallback_method(method: Callable) -> bool: + """Return True if the method is nc_fallback.""" + return getattr(method, NC_METHOD_TYPE_ATTR, None) is NCMethodType.FALLBACK + + +def load_builtin_blueprint_for_ocb(filename: str, blueprint_name: str, module: ModuleType | None = None) -> str: + """Get blueprint code from a file.""" + import io + import os + + from hathor.nanocontracts import blueprints + + module = module or blueprints + cur_dir = os.path.dirname(not_none(module.__file__)) + filepath = os.path.join(not_none(cur_dir), filename) + code_text = io.StringIO() + with open(filepath, 'r') as nc_file: + for line in nc_file.readlines(): + code_text.write(line) + code_text.write(f'__blueprint__ = {blueprint_name}\n') + res = code_text.getvalue() + code_text.close() + return res + + +def derive_child_contract_id(parent_id: ContractId, salt: bytes, blueprint_id: BlueprintId) -> ContractId: + """Derives the contract id for a 
nano contract created by another (parent) contract.""" + h = hashlib.sha256() + h.update(CHILD_CONTRACT_ID_PREFIX) + h.update(parent_id) + h.update(salt) + h.update(blueprint_id) + return ContractId(VertexId(h.digest())) + + +def derive_child_token_id(parent_id: ContractId, token_symbol: str) -> TokenUid: + """Derive the token id for a token created by a (parent) contract.""" + h = hashlib.sha256() + h.update(CHILD_TOKEN_ID_PREFIX) + h.update(parent_id) + h.update(token_symbol.encode('utf-8')) + return TokenUid(VertexId(h.digest())) + + +def sign_openssl(nano_header: NanoHeader, privkey: ec.EllipticCurvePrivateKey) -> None: + """Sign this nano header using a privkey from the cryptography lib.""" + from hathor.transaction import Transaction + from hathor.transaction.scripts import P2PKH + + pubkey = privkey.public_key() + pubkey_bytes = get_public_key_bytes_compressed(pubkey) + nano_header.nc_address = get_address_from_public_key_bytes(pubkey_bytes) + + assert isinstance(nano_header.tx, Transaction) + data = nano_header.tx.get_sighash_all_data() + signature = privkey.sign(data, ec.ECDSA(hashes.SHA256())) + + nano_header.nc_script = P2PKH.create_input_data(public_key_bytes=pubkey_bytes, signature=signature) + + +def sign_pycoin(nano_header: NanoHeader, privkey: PycoinKey) -> None: + """Sign this nano header using a privkey from the pycoin lib.""" + from hathor.transaction import Transaction + from hathor.transaction.scripts import P2PKH + + pubkey_bytes = privkey.sec() + nano_header.nc_address = get_address_from_public_key_bytes(pubkey_bytes) + + assert isinstance(nano_header.tx, Transaction) + data = nano_header.tx.get_sighash_all_data() + data_hash = hashlib.sha256(data).digest() + signature = privkey.sign(data_hash) + + nano_header.nc_script = P2PKH.create_input_data(public_key_bytes=pubkey_bytes, signature=signature) + + +def sign_openssl_multisig( + nano_header: NanoHeader, + *, + required_count: int, + redeem_pubkey_bytes: list[bytes], + sign_privkeys: 
list[ec.EllipticCurvePrivateKey], +) -> None: + """Sign this nano header with multisig using privkeys from the cryptography lib.""" + from hathor.transaction import Transaction + from hathor.transaction.scripts import MultiSig + from hathor.wallet.util import generate_multisig_address, generate_multisig_redeem_script + + redeem_script = generate_multisig_redeem_script(required_count, redeem_pubkey_bytes) + multisig_address_b58 = generate_multisig_address(redeem_script) + multisig_address = decode_address(multisig_address_b58) + nano_header.nc_address = multisig_address + + assert isinstance(nano_header.tx, Transaction) + data = nano_header.tx.get_sighash_all_data() + signatures = [privkey.sign(data, ec.ECDSA(hashes.SHA256())) for privkey in sign_privkeys] + + nano_header.nc_script = MultiSig.create_input_data(redeem_script, signatures) diff --git a/hathor/nanocontracts/vertex_data.py b/hathor/nanocontracts/vertex_data.py new file mode 100644 index 000000000..09065ec51 --- /dev/null +++ b/hathor/nanocontracts/vertex_data.py @@ -0,0 +1,177 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import annotations + +from dataclasses import dataclass +from typing import TYPE_CHECKING + +from typing_extensions import Self + +from hathor.types import TokenUid, VertexId + +if TYPE_CHECKING: + from hathor.transaction import BaseTransaction, Block, TxInput, TxOutput, TxVersion + from hathor.transaction.headers.nano_header import NanoHeader + + +def _get_txin_output(vertex: BaseTransaction, txin: TxInput) -> TxOutput | None: + """Return the output that txin points to.""" + from hathor.transaction.storage.exceptions import TransactionDoesNotExist + + if vertex.storage is None: + return None + + try: + vertex2 = vertex.storage.get_transaction(txin.tx_id) + except TransactionDoesNotExist: + assert False, f'missing dependency: {txin.tx_id.hex()}' + + assert len(vertex2.outputs) > txin.index, 'invalid output index' + + txin_output = vertex2.outputs[txin.index] + return txin_output + + +@dataclass(frozen=True, slots=True, kw_only=True) +class VertexData: + version: TxVersion + hash: bytes + nonce: int + signal_bits: int + weight: float + inputs: tuple[TxInputData, ...] + outputs: tuple[TxOutputData, ...] + tokens: tuple[TokenUid, ...] + parents: tuple[VertexId, ...] + block: BlockData + headers: tuple[HeaderData, ...] + + @classmethod + def create_from_vertex(cls, vertex: BaseTransaction) -> Self: + from hathor.transaction import Transaction + from hathor.transaction.headers.nano_header import NanoHeader + + inputs = tuple( + TxInputData.create_from_txin(txin, _get_txin_output(vertex, txin)) + for txin in vertex.inputs + ) + outputs = tuple(TxOutputData.create_from_txout(txout) for txout in vertex.outputs) + parents = tuple(vertex.parents) + tokens: tuple[TokenUid, ...] 
= tuple() + vertex_meta = vertex.get_metadata() + if vertex_meta.first_block is not None: + assert vertex.storage is not None + assert vertex_meta.first_block is not None + block = vertex.storage.get_block(vertex_meta.first_block) + block_data = BlockData.create_from_block(block) + else: + # XXX: use dummy data instead + block_data = BlockData(hash=VertexId(b''), timestamp=0, height=0) + + assert isinstance(vertex, Transaction) + headers_data: list[HeaderData] = [] + has_nano_header = False + for header in vertex.headers: + if isinstance(header, NanoHeader): + assert not has_nano_header, 'code should guarantee NanoHeader only appears once' + headers_data.append(NanoHeaderData.create_from_nano_header(header)) + has_nano_header = True + + original_tokens = getattr(vertex, 'tokens', None) + if original_tokens is not None: + # XXX Should we add HTR_TOKEN_ID as first token? + tokens = tuple(original_tokens) + + return cls( + version=vertex.version, + hash=vertex.hash, + nonce=vertex.nonce, + signal_bits=vertex.signal_bits, + weight=vertex.weight, + inputs=inputs, + outputs=outputs, + tokens=tokens, + parents=parents, + block=block_data, + headers=tuple(headers_data), + ) + + +@dataclass(frozen=True, slots=True, kw_only=True) +class TxInputData: + tx_id: VertexId + index: int + data: bytes + info: TxOutputData | None + + @classmethod + def create_from_txin(cls, txin: TxInput, txin_output: TxOutput | None) -> Self: + return cls( + tx_id=txin.tx_id, + index=txin.index, + data=txin.data, + info=TxOutputData.create_from_txout(txin_output) if txin_output else None, + ) + + +@dataclass(frozen=True, slots=True, kw_only=True) +class TxOutputData: + value: int + script: bytes + token_data: int + + @classmethod + def create_from_txout(cls, txout: TxOutput) -> Self: + return cls( + value=txout.value, + script=txout.script, + token_data=txout.token_data, + ) + + +@dataclass(frozen=True, slots=True, kw_only=True) +class BlockData: + hash: VertexId + timestamp: int + height: int + + 
@classmethod + def create_from_block(cls, block: Block) -> Self: + return cls( + hash=block.hash, + timestamp=block.timestamp, + height=block.get_height(), + ) + + +class HeaderData: + """Marker class, represents an arbitrary vertex-header.""" + + +@dataclass(frozen=True, slots=True, kw_only=True) +class NanoHeaderData(HeaderData): + nc_seqnum: int + nc_id: VertexId + nc_method: str + nc_args_bytes: bytes + + @classmethod + def create_from_nano_header(cls, nc_header: NanoHeader) -> Self: + return cls( + nc_seqnum=nc_header.nc_seqnum, + nc_id=nc_header.nc_id, + nc_method=nc_header.nc_method, + nc_args_bytes=nc_header.nc_args_bytes, + ) diff --git a/hathor/p2p/manager.py b/hathor/p2p/manager.py index 56371da69..422deb7eb 100644 --- a/hathor/p2p/manager.py +++ b/hathor/p2p/manager.py @@ -555,7 +555,6 @@ def connect_to_peer_from_connection_queue(self) -> None: """ It is called by the `lc_connect` looping call and tries to connect to a new peer. """ if not self.new_connection_from_queue: - self.log.debug('connection queue is empty') return assert self.manager is not None self.log.debug('connect to peer from connection queue') diff --git a/hathor/p2p/protocol.py b/hathor/p2p/protocol.py index b582fcb77..15cf4e8a5 100644 --- a/hathor/p2p/protocol.py +++ b/hathor/p2p/protocol.py @@ -43,6 +43,10 @@ logger = get_logger() cpu = get_cpu_profiler() +MISBEHAVIOR_KEY = 'misbehavior' +MISBEHAVIOR_THRESHOLD = 100 +MISBEHAVIOR_WINDOW = 3600 # decay in 1h + class HathorProtocol: """ Implements Hathor Peer-to-Peer Protocol. An instance of this class is @@ -172,6 +176,10 @@ def __init__( max_size=self._settings.MAX_UNVERIFIED_PEERS_PER_CONN, ) + # Misbehavior score that is increased after protocol violations. 
+ self._misbehavior_score = RateLimiter(self.reactor) + self._misbehavior_score.set_limit(MISBEHAVIOR_KEY, MISBEHAVIOR_THRESHOLD, MISBEHAVIOR_WINDOW) + # Protocol version is initially unset self.sync_version = None @@ -245,6 +253,13 @@ def on_idle_timeout(self) -> None: # We cannot use self.disconnect() because it will wait to send pending data. self.disconnect(force=True) + def increase_misbehavior_score(self, *, weight: int) -> None: + """Increase misbehavior score and acts if the threshold is reached.""" + if not self._misbehavior_score.add_hit(MISBEHAVIOR_KEY, weight): + score = self._misbehavior_score.get_limit(MISBEHAVIOR_KEY) + self.log.warn('connection closed due to misbehavior', score=score) + self.send_error_and_close_connection('Misbehavior score is too high') + def on_connect(self) -> None: """ Executed when the connection is established. """ @@ -315,11 +330,6 @@ def recv_message(self, cmd: ProtocolMessages, payload: str) -> None: self.peer.info.last_seen = now if not self.ratelimit.add_hit(self.RateLimitKeys.GLOBAL): - # XXX: on Python 3.11 the result of the following expression: - # '{}'.format(HathorProtocol.RateLimitKeys.GLOBAL) - # is not 'global' but 'RateLimitKeys.GLOBAL', even though the enum value *is* a string, but it seems - # that something like `str(value)` is called which results in a different value (usually not the case - # for regular strings, but it is for enum+str), using `enum_variant.value` side-steps this problem self.state.send_throttle(self.RateLimitKeys.GLOBAL.value) return diff --git a/hathor/p2p/rate_limiter.py b/hathor/p2p/rate_limiter.py index defbd9342..ddc7b3c2f 100644 --- a/hathor/p2p/rate_limiter.py +++ b/hathor/p2p/rate_limiter.py @@ -71,13 +71,14 @@ def add_hit(self, key: str, weight: int = 1) -> bool: return True max_hits, window_seconds = self.keys[key] + now = self.reactor.seconds() + if key not in self.hits: - self.hits[key] = RateLimiterLimit(weight, self.reactor.seconds()) - return True + self.hits[key] = 
RateLimiterLimit(0, now) hits, latest_time = self.hits[key] - dt = self.reactor.seconds() - latest_time + dt = now - latest_time # rate = max_hits / window_seconds (hits per second) # x = dt * rate diff --git a/hathor/p2p/sync_v2/agent.py b/hathor/p2p/sync_v2/agent.py index d96fd91e4..bc24fe3e8 100644 --- a/hathor/p2p/sync_v2/agent.py +++ b/hathor/p2p/sync_v2/agent.py @@ -22,7 +22,7 @@ from structlog import get_logger from twisted.internet.defer import Deferred, inlineCallbacks -from twisted.internet.task import LoopingCall, deferLater +from twisted.internet.task import LoopingCall from hathor.conf.settings import HathorSettings from hathor.exception import InvalidNewTransaction @@ -171,8 +171,9 @@ def __init__( # Maximum running time to consider a sync stale. self.max_running_time: int = 30 * 60 # seconds - # Whether we propagate transactions or not - self._is_relaying = False + # Whether vertex relay is enabled or not. + self._outbound_relay_enabled = False # from us to the peer + self._inbound_relay_enabled = False # from the peer to us # Whether to sync with this peer self._is_enabled: bool = False @@ -224,7 +225,7 @@ def send_tx_to_peer_if_possible(self, tx: BaseTransaction) -> None: # blocks as priorities to help miners get the blocks as fast as we can # We decided not to implement this right now because we already have some producers # being used in the sync algorithm and the code was becoming a bit too complex - if self._is_relaying: + if self._outbound_relay_enabled: self.send_data(tx) def is_started(self) -> bool: @@ -515,6 +516,7 @@ def send_relay(self, *, enable: bool = True) -> None: """ Send a RELAY message. 
""" self.log.debug('send_relay', enable=enable) + self._inbound_relay_enabled = enable self.send_message(ProtocolMessages.RELAY, json.dumps(enable)) def handle_relay(self, payload: str) -> None: @@ -522,11 +524,11 @@ def handle_relay(self, payload: str) -> None: """ if not payload: # XXX: "legacy" nothing means enable - self._is_relaying = True + self._outbound_relay_enabled = True else: val = json.loads(payload) if isinstance(val, bool): - self._is_relaying = val + self._outbound_relay_enabled = val else: self.protocol.send_error_and_close_connection('RELAY: invalid value') return @@ -613,17 +615,11 @@ def find_best_common_block(self, return lo @inlineCallbacks - def on_block_complete(self, blk: Block, vertex_list: list[BaseTransaction]) -> Generator[Any, Any, None]: + def on_block_complete(self, blk: Block, vertex_list: list[Transaction]) -> Generator[Any, Any, None]: """This method is called when a block and its transactions are downloaded.""" # Note: Any vertex and block could have already been added by another concurrent syncing peer. try: - for tx in vertex_list: - if not self.tx_storage.transaction_exists(tx.hash): - self.vertex_handler.on_new_vertex(tx, fails_silently=False) - yield deferLater(self.reactor, 0, lambda: None) - - if not self.tx_storage.transaction_exists(blk.hash): - self.vertex_handler.on_new_vertex(blk, fails_silently=False) + yield self.vertex_handler.on_new_block(blk, deps=vertex_list) except InvalidNewTransaction: self.protocol.send_error_and_close_connection('invalid vertex received') @@ -1038,6 +1034,7 @@ def handle_transaction(self, payload: str) -> None: tx.storage = self.tx_storage assert self._tx_streaming_client is not None + assert isinstance(tx, Transaction) self._tx_streaming_client.handle_transaction(tx) @inlineCallbacks @@ -1128,6 +1125,12 @@ def handle_get_data(self, payload: str) -> None: def handle_data(self, payload: str) -> None: """ Handle a DATA message. """ + if not self._inbound_relay_enabled: + # Unsolicited vertex. 
+ # Should we have a grace period when incoming relay is disabled? Is the decay mechanism enough? + self.protocol.increase_misbehavior_score(weight=1) + return + if not payload: return part1, _, part2 = payload.partition(' ') @@ -1166,17 +1169,18 @@ def handle_data(self, payload: str) -> None: # XXX: maybe we could add a hash blacklist and punish peers propagating known bad txs self.tx_storage.compare_bytes_with_local_tx(tx) return - else: - # If we have not requested the data, it is a new transaction being propagated - # in the network, thus, we propagate it as well. - if self.tx_storage.can_validate_full(tx): - self.log.debug('tx received in real time from peer', tx=tx.hash_hex, peer=self.protocol.get_peer_id()) - try: - success = self.vertex_handler.on_new_vertex(tx, fails_silently=False) - if success: - self.protocol.connections.send_tx_to_peers(tx) - except InvalidNewTransaction: - self.protocol.send_error_and_close_connection('invalid vertex received') - else: - self.log.debug('skipping tx received in real time from peer', - tx=tx.hash_hex, peer=self.protocol.get_peer_id()) + + # Unsolicited vertices must be fully validated. + if not self.tx_storage.can_validate_full(tx): + self.log.debug('skipping tx received in real time from peer', + tx=tx.hash_hex, peer=self.protocol.get_peer_id()) + return + + # Finally, it is either an unsolicited new transaction or block. 
+ self.log.debug('tx received in real time from peer', tx=tx.hash_hex, peer=self.protocol.get_peer_id()) + try: + success = self.vertex_handler.on_new_relayed_vertex(tx) + if success: + self.protocol.connections.send_tx_to_peers(tx) + except InvalidNewTransaction: + self.protocol.send_error_and_close_connection('invalid vertex received') diff --git a/hathor/p2p/sync_v2/blockchain_streaming_client.py b/hathor/p2p/sync_v2/blockchain_streaming_client.py index e78ec056b..295e59c7e 100644 --- a/hathor/p2p/sync_v2/blockchain_streaming_client.py +++ b/hathor/p2p/sync_v2/blockchain_streaming_client.py @@ -126,7 +126,7 @@ def handle_blocks(self, blk: Block) -> None: if self.tx_storage.can_validate_full(blk): try: - self.vertex_handler.on_new_vertex(blk, fails_silently=False) + self.vertex_handler.on_new_block(blk, deps=[]) except HathorError: self.fails(InvalidVertexError(blk.hash.hex())) return diff --git a/hathor/p2p/sync_v2/mempool.py b/hathor/p2p/sync_v2/mempool.py index 03651642e..02aa3f458 100644 --- a/hathor/p2p/sync_v2/mempool.py +++ b/hathor/p2p/sync_v2/mempool.py @@ -19,7 +19,7 @@ from twisted.internet.defer import Deferred, inlineCallbacks from hathor.exception import InvalidNewTransaction -from hathor.transaction import BaseTransaction +from hathor.transaction import Transaction if TYPE_CHECKING: from hathor.p2p.sync_v2.agent import NodeBlockSync @@ -95,7 +95,7 @@ def _unsafe_run(self) -> Generator[Deferred, Any, bool]: while self.missing_tips: self.log.debug('We have missing tips! Let\'s start!', missing_tips=[x.hex() for x in self.missing_tips]) tx_id = next(iter(self.missing_tips)) - tx: BaseTransaction = yield self.sync_agent.get_tx(tx_id) + tx: Transaction = yield self.sync_agent.get_tx(tx_id) # Stack used by the DFS in the dependencies. # We use a deque for performance reasons. 
self.log.debug('start mempool DSF', tx=tx.hash_hex) @@ -106,7 +106,7 @@ def _unsafe_run(self) -> Generator[Deferred, Any, bool]: return False @inlineCallbacks - def _dfs(self, stack: deque[BaseTransaction]) -> Generator[Deferred, Any, None]: + def _dfs(self, stack: deque[Transaction]) -> Generator[Deferred, Any, None]: """DFS method.""" while stack: tx = stack[-1] @@ -123,7 +123,7 @@ def _dfs(self, stack: deque[BaseTransaction]) -> Generator[Deferred, Any, None]: if len(stack) > self.MAX_STACK_LENGTH: stack.popleft() - def _next_missing_dep(self, tx: BaseTransaction) -> Optional[bytes]: + def _next_missing_dep(self, tx: Transaction) -> Optional[bytes]: """Get the first missing dependency found of tx.""" assert not tx.is_block for txin in tx.inputs: @@ -134,13 +134,13 @@ def _next_missing_dep(self, tx: BaseTransaction) -> Optional[bytes]: return parent return None - def _add_tx(self, tx: BaseTransaction) -> None: + def _add_tx(self, tx: Transaction) -> None: """Add tx to the DAG.""" self.missing_tips.discard(tx.hash) if self.tx_storage.transaction_exists(tx.hash): return try: - success = self.vertex_handler.on_new_vertex(tx, fails_silently=False) + success = self.vertex_handler.on_new_mempool_transaction(tx) if success: self.sync_agent.protocol.connections.send_tx_to_peers(tx) except InvalidNewTransaction: diff --git a/hathor/p2p/sync_v2/transaction_streaming_client.py b/hathor/p2p/sync_v2/transaction_streaming_client.py index e784a41cc..2cb1b22c1 100644 --- a/hathor/p2p/sync_v2/transaction_streaming_client.py +++ b/hathor/p2p/sync_v2/transaction_streaming_client.py @@ -25,9 +25,10 @@ UnexpectedVertex, ) from hathor.p2p.sync_v2.streamers import StreamEnd -from hathor.transaction import BaseTransaction +from hathor.transaction import BaseTransaction, Transaction from hathor.transaction.exceptions import HathorError, TxValidationError from hathor.types import VertexId +from hathor.verification.verification_params import VerificationParams if TYPE_CHECKING: from 
hathor.p2p.sync_v2.agent import NodeBlockSync @@ -46,6 +47,9 @@ def __init__(self, self.protocol = self.sync_agent.protocol self.tx_storage = self.sync_agent.tx_storage self.verification_service = self.protocol.node.verification_service + # XXX: since it's not straightforward to get the correct block, it's OK to just disable checkdatasig counting, + # it will be correctly enabled when doing a full validation anyway. + self.verification_params = VerificationParams(enable_checkdatasig_count=False) self.reactor = sync_agent.reactor self.log = logger.new(peer=self.protocol.get_short_peer_id()) @@ -66,7 +70,7 @@ def __init__(self, self._tx_max_quantity = limit # Queue of transactions waiting to be processed. - self._queue: deque[BaseTransaction] = deque() + self._queue: deque[Transaction] = deque() # Keeps the response code if the streaming has ended. self._response_code: Optional[StreamEnd] = None @@ -79,7 +83,7 @@ def __init__(self, # In-memory database of transactions already received but still # waiting for dependencies. - self._db: dict[VertexId, BaseTransaction] = {} + self._db: dict[VertexId, Transaction] = {} self._existing_deps: set[VertexId] = set() self._prepare_block(self.partial_blocks[0]) @@ -103,7 +107,7 @@ def fails(self, reason: 'StreamingError') -> None: return self._deferred.errback(reason) - def handle_transaction(self, tx: BaseTransaction) -> None: + def handle_transaction(self, tx: Transaction) -> None: """This method is called by the sync agent when a TRANSACTION message is received.""" if self._deferred.called: return @@ -147,13 +151,13 @@ def process_queue(self) -> Generator[Any, Any, None]: self.reactor.callLater(0, self.process_queue) @inlineCallbacks - def _process_transaction(self, tx: BaseTransaction) -> Generator[Any, Any, None]: + def _process_transaction(self, tx: Transaction) -> Generator[Any, Any, None]: """Process transaction.""" # Run basic verification. 
if not tx.is_genesis: try: - self.verification_service.verify_basic(tx) + self.verification_service.verify_basic(tx, self.verification_params) except TxValidationError as e: self.fails(InvalidVertexError(repr(e))) return @@ -177,6 +181,7 @@ def _process_transaction(self, tx: BaseTransaction) -> Generator[Any, Any, None] self._update_dependencies(tx) + assert isinstance(tx, Transaction) self._db[tx.hash] = tx if not self._waiting_for: @@ -191,9 +196,9 @@ def _process_transaction(self, tx: BaseTransaction) -> Generator[Any, Any, None] if self._tx_received % 100 == 0: self.log.debug('tx streaming in progress', txs_received=self._tx_received) - def _update_dependencies(self, tx: BaseTransaction) -> None: + def _update_dependencies(self, vertex: BaseTransaction) -> None: """Update _existing_deps and _waiting_for with the dependencies.""" - for dep in tx.get_all_dependencies(): + for dep in vertex.get_all_dependencies(): if self.tx_storage.transaction_exists(dep) or dep in self._db: self._existing_deps.add(dep) else: diff --git a/hathor/pubsub.py b/hathor/pubsub.py index 8a4e25d4a..395123a12 100644 --- a/hathor/pubsub.py +++ b/hathor/pubsub.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations + from collections import defaultdict, deque from enum import Enum from typing import TYPE_CHECKING, Any, Callable, Optional @@ -24,6 +26,7 @@ from hathor.utils.zope import verified_cast if TYPE_CHECKING: + from hathor.nanocontracts.nc_exec_logs import NCEvent from hathor.transaction import BaseTransaction, Block logger = get_logger() @@ -138,6 +141,8 @@ class HathorEvents(Enum): REORG_FINISHED = 'reorg:finished' + NC_EVENT = 'nc:event' + class EventArguments: """Simple object for storing event arguments. 
@@ -149,6 +154,7 @@ class EventArguments: old_best_block: 'Block' new_best_block: 'Block' common_block: 'Block' + nc_event: NCEvent def __init__(self, **kwargs: Any) -> None: for key, value in kwargs.items(): diff --git a/hathor/reward_lock/reward_lock.py b/hathor/reward_lock/reward_lock.py index 85b6871e8..458e308c1 100644 --- a/hathor/reward_lock/reward_lock.py +++ b/hathor/reward_lock/reward_lock.py @@ -12,18 +12,20 @@ # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations + from typing import TYPE_CHECKING, Iterator, Optional -from hathor.conf.settings import HathorSettings from hathor.transaction import Block from hathor.util import not_none if TYPE_CHECKING: + from hathor.conf.settings import HathorSettings from hathor.transaction.storage.vertex_storage_protocol import VertexStorageProtocol from hathor.transaction.transaction import RewardLockedInfo, Transaction -def iter_spent_rewards(tx: 'Transaction', storage: 'VertexStorageProtocol') -> Iterator[Block]: +def iter_spent_rewards(tx: Transaction, storage: VertexStorageProtocol) -> Iterator[Block]: """Iterate over all the rewards being spent, assumes tx has been verified.""" for input_tx in tx.inputs: spent_tx = storage.get_vertex(input_tx.tx_id) @@ -32,7 +34,7 @@ def iter_spent_rewards(tx: 'Transaction', storage: 'VertexStorageProtocol') -> I yield spent_tx -def is_spent_reward_locked(settings: HathorSettings, tx: 'Transaction') -> bool: +def is_spent_reward_locked(settings: HathorSettings, tx: Transaction) -> bool: """ Check whether any spent reward is currently locked, considering only the block rewards spent by this tx itself, and not the inherited `min_height`""" return get_spent_reward_locked_info(settings, tx, not_none(tx.storage)) is not None @@ -40,9 +42,9 @@ def is_spent_reward_locked(settings: HathorSettings, tx: 'Transaction') -> bool: def get_spent_reward_locked_info( settings: HathorSettings, - tx: 'Transaction', 
- storage: 'VertexStorageProtocol', -) -> Optional['RewardLockedInfo']: + tx: Transaction, + storage: VertexStorageProtocol, +) -> Optional[RewardLockedInfo]: """Check if any input block reward is locked, returning the locked information if any, or None if they are all unlocked.""" from hathor.transaction.transaction import RewardLockedInfo @@ -54,7 +56,7 @@ def get_spent_reward_locked_info( return None -def get_minimum_best_height(storage: 'VertexStorageProtocol') -> int: +def get_minimum_best_height(storage: VertexStorageProtocol) -> int: """Return the height of the current best block that shall be used for `min_height` verification.""" import math diff --git a/hathor/serialization/__init__.py b/hathor/serialization/__init__.py new file mode 100644 index 000000000..65e1626a0 --- /dev/null +++ b/hathor/serialization/__init__.py @@ -0,0 +1,27 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from .deserializer import Deserializer +from .exceptions import BadDataError, OutOfDataError, SerializationError, TooLongError, UnsupportedTypeError +from .serializer import Serializer + +__all__ = [ + 'Serializer', + 'Deserializer', + 'SerializationError', + 'UnsupportedTypeError', + 'TooLongError', + 'OutOfDataError', + 'BadDataError', +] diff --git a/hathor/serialization/adapters/__init__.py b/hathor/serialization/adapters/__init__.py new file mode 100644 index 000000000..8667c7684 --- /dev/null +++ b/hathor/serialization/adapters/__init__.py @@ -0,0 +1,24 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from .generic_adapter import GenericDeserializerAdapter, GenericSerializerAdapter +from .max_bytes import MaxBytesDeserializer, MaxBytesExceededError, MaxBytesSerializer + +__all__ = [ + 'GenericDeserializerAdapter', + 'GenericSerializerAdapter', + 'MaxBytesDeserializer', + 'MaxBytesExceededError', + 'MaxBytesSerializer', +] diff --git a/hathor/serialization/adapters/generic_adapter.py b/hathor/serialization/adapters/generic_adapter.py new file mode 100644 index 000000000..9d7540bce --- /dev/null +++ b/hathor/serialization/adapters/generic_adapter.py @@ -0,0 +1,110 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from types import TracebackType +from typing import Generic, TypeVar + +from typing_extensions import Self, override + +from hathor.serialization.deserializer import Deserializer +from hathor.serialization.serializer import Serializer + +from ..types import Buffer + +S = TypeVar('S', bound=Serializer) +D = TypeVar('D', bound=Deserializer) + + +class GenericSerializerAdapter(Serializer, Generic[S]): + inner: S + + def __init__(self, serializer: S) -> None: + self.inner = serializer + + @override + def finalize(self) -> Buffer: + return self.inner.finalize() + + @override + def cur_pos(self) -> int: + return self.inner.cur_pos() + + @override + def write_byte(self, data: int) -> None: + self.inner.write_byte(data) + + @override + def write_bytes(self, data: Buffer) -> None: + self.inner.write_bytes(data) + + # allow using this adapter as a context manager: + + def __enter__(self) -> Self: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + traceback: TracebackType | None, + ) -> None: + pass + + +class GenericDeserializerAdapter(Deserializer, Generic[D]): + inner: D + + def __init__(self, deserializer: D) -> None: + self.inner = deserializer + + @override + def finalize(self) -> None: + return self.inner.finalize() + + @override + def is_empty(self) -> bool: + return self.inner.is_empty() + + @override + def peek_byte(self) -> int: + return self.inner.peek_byte() + + @override + def peek_bytes(self, n: int, *, exact: bool = True) -> Buffer: + return self.inner.peek_bytes(n, exact=exact) + + 
@override + def read_byte(self) -> int: + return self.inner.read_byte() + + @override + def read_bytes(self, n: int, *, exact: bool = True) -> Buffer: + return self.inner.read_bytes(n, exact=exact) + + @override + def read_all(self) -> Buffer: + return self.inner.read_all() + + # allow using this adapter as a context manager: + + def __enter__(self) -> Self: + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + traceback: TracebackType | None, + ) -> None: + pass diff --git a/hathor/serialization/adapters/max_bytes.py b/hathor/serialization/adapters/max_bytes.py new file mode 100644 index 000000000..e3cf0f455 --- /dev/null +++ b/hathor/serialization/adapters/max_bytes.py @@ -0,0 +1,91 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import TypeVar + +from typing_extensions import override + +from hathor.serialization.deserializer import Deserializer +from hathor.serialization.exceptions import SerializationError +from hathor.serialization.serializer import Serializer + +from ..types import Buffer +from .generic_adapter import GenericDeserializerAdapter, GenericSerializerAdapter + +S = TypeVar('S', bound=Serializer) +D = TypeVar('D', bound=Deserializer) + + +class MaxBytesExceededError(SerializationError): + """ This error is raised when the adapted serializer reached its maximum bytes write/read. 
+ + After this exception is raised the adapted serializer cannot be used anymore. Handlers of this exception are + expected to either: bubble up the exception (or an equivalent exception), or return an error. Handlers should not + try to write again on the same serializer. + + It is possible that the inner serializer is still usable, but the point where the serializer stopped writing or + reading might leave the rest of the data unusable, so for that reason it should be considered a failed + (de)serialization overall, and not simply a failed "read/write" operation. + """ + pass + + +class MaxBytesSerializer(GenericSerializerAdapter[S]): + def __init__(self, serializer: S, max_bytes: int) -> None: + super().__init__(serializer) + self._bytes_left = max_bytes + + def _check_update_exceeds(self, write_size: int) -> None: + self._bytes_left -= write_size + if self._bytes_left < 0: + raise MaxBytesExceededError + + @override + def write_byte(self, data: int) -> None: + self._check_update_exceeds(1) + super().write_byte(data) + + @override + def write_bytes(self, data: Buffer) -> None: + data_view = memoryview(data) + self._check_update_exceeds(len(data_view)) + super().write_bytes(data_view) + + +class MaxBytesDeserializer(GenericDeserializerAdapter[D]): + def __init__(self, deserializer: D, max_bytes: int) -> None: + super().__init__(deserializer) + self._bytes_left = max_bytes + + def _check_update_exceeds(self, read_size: int) -> None: + self._bytes_left -= read_size + if self._bytes_left < 0: + raise MaxBytesExceededError + + @override + def read_byte(self) -> int: + self._check_update_exceeds(1) + return super().read_byte() + + @override + def read_bytes(self, n: int, *, exact: bool = True) -> Buffer: + self._check_update_exceeds(n) + return super().read_bytes(n, exact=exact) + + @override + def read_all(self) -> Buffer: + result = super().read_bytes(self._bytes_left, exact=False) + if not self.is_empty(): + raise MaxBytesExceededError + return result diff --git 
a/hathor/serialization/bytes_deserializer.py b/hathor/serialization/bytes_deserializer.py new file mode 100644 index 000000000..1a26ec7b2 --- /dev/null +++ b/hathor/serialization/bytes_deserializer.py @@ -0,0 +1,76 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing_extensions import override + +from .deserializer import Deserializer +from .exceptions import OutOfDataError +from .types import Buffer + +_EMPTY_VIEW = memoryview(b'') + + +class BytesDeserializer(Deserializer): + """Simple implementation of a Deserializer to parse values from a byte sequence. + + This implementation maintains a memoryview that is shortened as the bytes are read. 
+ """ + + def __init__(self, data: Buffer) -> None: + self._view = memoryview(data) + + @override + def finalize(self) -> None: + if not self.is_empty(): + raise ValueError('trailing data') + del self._view + + @override + def is_empty(self) -> bool: + # XXX: least amount of OPs, "not" converts to bool with the correct semantics of "is empty" + return not self._view + + @override + def peek_byte(self) -> int: + if not len(self._view): + raise OutOfDataError('not enough bytes to read') + return self._view[0] + + @override + def peek_bytes(self, n: int, *, exact: bool = True) -> memoryview: + if n < 0: + raise ValueError('value cannot be negative') + if exact and len(self._view) < n: + raise OutOfDataError('not enough bytes to read') + return self._view[:n] + + @override + def read_byte(self) -> int: + b = self.peek_byte() + self._view = self._view[1:] + return b + + @override + def read_bytes(self, n: int, *, exact: bool = True) -> memoryview: + b = self.peek_bytes(n, exact=exact) + if exact and len(self._view) < n: + raise OutOfDataError('not enough bytes to read') + self._view = self._view[n:] + return b + + @override + def read_all(self) -> memoryview: + b = self._view + self._view = _EMPTY_VIEW + return b diff --git a/hathor/serialization/bytes_serializer.py b/hathor/serialization/bytes_serializer.py new file mode 100644 index 000000000..067e9920b --- /dev/null +++ b/hathor/serialization/bytes_serializer.py @@ -0,0 +1,53 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from typing_extensions import override + +from .serializer import Serializer +from .types import Buffer + + +class BytesSerializer(Serializer): + """Simple implementation of Serializer to write to memory. + + This implementation defers joining everything until finalize is called, before that every write is stored as a + memoryview in a list. + """ + + def __init__(self) -> None: + self._parts: list[memoryview] = [] + self._pos: int = 0 + + @override + def finalize(self) -> memoryview: + result = memoryview(b''.join(self._parts)) + del self._parts + del self._pos + return result + + @override + def cur_pos(self) -> int: + return self._pos + + @override + def write_byte(self, data: int) -> None: + # int.to_bytes checks for correct range + self._parts.append(memoryview(int.to_bytes(data))) + self._pos += 1 + + @override + def write_bytes(self, data: Buffer) -> None: + part = memoryview(data) + self._parts.append(part) + self._pos += len(part) diff --git a/hathor/serialization/compound_encoding/__init__.py b/hathor/serialization/compound_encoding/__init__.py new file mode 100644 index 000000000..b2e44d889 --- /dev/null +++ b/hathor/serialization/compound_encoding/__init__.py @@ -0,0 +1,50 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +This module was made to hold compound encoding implementations. 
+ +Compound encoders are encoders that are generic in some way and will delegate the encoding of some portion to another +encoder. For example a `value: Optional[T]` encoder is prepared to encode the value and delegate the rest to an encoder +that knows how to encode `T`. + +The general organization should be that each submodule `x` deals with a single type and look like this: + + def encode_x(serializer: Serializer, value: ValueType, ...config params...) -> None: + ... + + def decode_x(deserializer: Deserializer, ...config params...) -> ValueType: + ... + +The "config params" are optional and specific to each encoder. Submodules should not have to take into consideration +how types are mapped to encoders. +""" + +from typing import Protocol, TypeVar + +from hathor.serialization.deserializer import Deserializer +from hathor.serialization.serializer import Serializer + +T_co = TypeVar('T_co', covariant=True) +T_contra = TypeVar('T_contra', contravariant=True) + + +class Decoder(Protocol[T_co]): + def __call__(self, deserializer: Deserializer, /) -> T_co: + ... + + +class Encoder(Protocol[T_contra]): + def __call__(self, serializer: Serializer, value: T_contra, /) -> None: + ... diff --git a/hathor/serialization/compound_encoding/collection.py b/hathor/serialization/compound_encoding/collection.py new file mode 100644 index 000000000..dc534fa8a --- /dev/null +++ b/hathor/serialization/compound_encoding/collection.py @@ -0,0 +1,64 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +r""" +A collection is basically any value that has a known size and is iterable. + +Layout: [N: unsigned leb128][value_0]...[value_N] + +>>> from hathor.serialization.encoding.utf8 import encode_utf8, decode_utf8 +>>> se = Serializer.build_bytes_serializer() +>>> value = ['foobar', 'π', '😎', 'test'] +>>> encode_collection(se, value, encode_utf8) +>>> bytes(se.finalize()).hex() +'0406666f6f62617202cf8004f09f988e0474657374' + +Breakdown of the result: + + 04: 4 in leb128, the total length + 06666f6f626172: 'foobar' (with length prefix) + 02cf80: 'π' (with length prefix) + 04f09f988e: '😎' (with length prefix) + 0474657374: 'test' (with length prefix) + +When decoding, the builder can be any compatible collection, in the previous example a `list` was encoded, but when +decoding a `tuple` could be used, it only matters that the collection can be initialized with an `Iterable[T]`. + +>>> de = Deserializer.build_bytes_deserializer(bytes.fromhex('0406666f6f62617202cf8004f09f988e0474657374')) +>>> decode_collection(de, decode_utf8, tuple) +('foobar', 'π', '😎', 'test') +>>> de.finalize() +""" + +from collections.abc import Collection, Iterable +from typing import Callable, TypeVar + +from hathor.serialization import Deserializer, Serializer +from hathor.serialization.encoding.leb128 import decode_leb128, encode_leb128 + +from . 
import Decoder, Encoder + +T = TypeVar('T') +R = TypeVar('R', bound=Collection) + + +def encode_collection(serializer: Serializer, values: Collection[T], encoder: Encoder[T]) -> None: + encode_leb128(serializer, len(values), signed=False) + for value in values: + encoder(serializer, value) + + +def decode_collection(deserializer: Deserializer, decoder: Decoder[T], builder: Callable[[Iterable[T]], R]) -> R: + length = decode_leb128(deserializer, signed=False) + return builder(decoder(deserializer) for _ in range(length)) diff --git a/hathor/serialization/compound_encoding/mapping.py b/hathor/serialization/compound_encoding/mapping.py new file mode 100644 index 000000000..31eb74c4a --- /dev/null +++ b/hathor/serialization/compound_encoding/mapping.py @@ -0,0 +1,86 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +r""" +Encoding a mapping is equivalent to encoding a collection of 2-tuples. + +Layout: [N: unsigned leb128][key_0][value_0]...[key_N][value_N] + +>>> from hathor.serialization.encoding.utf8 import encode_utf8, decode_utf8 +>>> from hathor.serialization.encoding.bool import encode_bool, decode_bool +>>> se = Serializer.build_bytes_serializer() +>>> value = { +... 'foo': False, +... 'bar': True, +... 'foobar': True, +... 'baz': False, +... 
} +>>> encode_mapping(se, value, encode_utf8, encode_bool) +>>> bytes(se.finalize()).hex() +'0403666f6f00036261720106666f6f626172010362617a00' + +Breakdown of the result: + + 04: 4 in leb128, the total length + 03666f6f: 'foo' with length prefix + 00: False + 03626172: 'bar' with length prefix + 01: True + 06666f6f626172: 'foobar' with length prefix + 01: True + 0362617a: 'baz' with length prefix + 00: False + +>>> de = Deserializer.build_bytes_deserializer(bytes.fromhex('0403666f6f00036261720106666f6f626172010362617a00')) +>>> decode_mapping(de, decode_utf8, decode_bool, dict) +{'foo': False, 'bar': True, 'foobar': True, 'baz': False} +>>> de.finalize() +""" + +from collections.abc import Iterable, Mapping +from typing import Callable, TypeVar + +from hathor.serialization import Deserializer, Serializer +from hathor.serialization.encoding.leb128 import decode_leb128, encode_leb128 + +from . import Decoder, Encoder + +KT = TypeVar('KT') +VT = TypeVar('VT') +R = TypeVar('R', bound=Mapping) + + +def encode_mapping( + serializer: Serializer, + values_mapping: Mapping[KT, VT], + key_encoder: Encoder[KT], + value_encoder: Encoder[VT], +) -> None: + encode_leb128(serializer, len(values_mapping), signed=False) + for key, value in values_mapping.items(): + key_encoder(serializer, key) + value_encoder(serializer, value) + + +def decode_mapping( + deserializer: Deserializer, + key_decoder: Decoder[KT], + value_decoder: Decoder[VT], + mapping_builder: Callable[[Iterable[tuple[KT, VT]]], R], +) -> R: + size = decode_leb128(deserializer, signed=False) + return mapping_builder( + (key_decoder(deserializer), value_decoder(deserializer)) + for _ in range(size) + ) diff --git a/hathor/serialization/compound_encoding/optional.py b/hathor/serialization/compound_encoding/optional.py new file mode 100644 index 000000000..11c5aa8eb --- /dev/null +++ b/hathor/serialization/compound_encoding/optional.py @@ -0,0 +1,68 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache 
License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +r""" +An optional type is encoded the same way as a collection with max length of 1. + +Layout: + + [0x00] when None + [0x01][value] when not None + +>>> from hathor.serialization.encoding.utf8 import encode_utf8, decode_utf8 +>>> se = Serializer.build_bytes_serializer() +>>> encode_optional(se, 'foobar', encode_utf8) +>>> bytes(se.finalize()).hex() +'0106666f6f626172' + +>>> se = Serializer.build_bytes_serializer() +>>> encode_optional(se, None, encode_utf8) +>>> bytes(se.finalize()).hex() +'00' + +>>> de = Deserializer.build_bytes_deserializer(bytes.fromhex('0106666f6f626172')) +>>> decode_optional(de, decode_utf8) +'foobar' +>>> de.finalize() + +>>> de = Deserializer.build_bytes_deserializer(bytes.fromhex('00')) +>>> str(decode_optional(de, decode_utf8)) +'None' +>>> de.finalize() +""" + +from typing import Optional, TypeVar + +from hathor.serialization import Deserializer, Serializer +from hathor.serialization.encoding.bool import decode_bool, encode_bool + +from . 
def encode_optional(serializer: 'Serializer', value: 'Optional[T]', encoder: 'Encoder[T]') -> None:
    """ Encode an optional value: a boolean presence flag, then the value itself when present.

    Layout matches the module docstring: 0x00 for None, 0x01 followed by the encoded value otherwise.
    """
    present = value is not None
    encode_bool(serializer, present)
    if present:
        encoder(serializer, value)


def decode_optional(deserializer: 'Deserializer', decoder: 'Decoder[T]') -> 'Optional[T]':
    """ Decode an optional value: read the presence flag, then the value when the flag is set. """
    if decode_bool(deserializer):
        return decoder(deserializer)
    return None
def encode_signed_data(serializer: 'Serializer', value: 'SignedData[T]', encoder: 'Encoder[T]') -> None:
    """ Encode a SignedData[T]: the inner value, then the script input as length-prefixed bytes. """
    assert isinstance(value, SignedData)
    encoder(serializer, value.data)
    encode_bytes(serializer, value.script_input)


def decode_signed_data(
    deserializer: 'Deserializer',
    decoder: 'Decoder[T]',
    inner_type: 'type[T]',
) -> 'SignedData[T]':
    """ Decode a SignedData[T]: the inner value, then the script input, rebuilt as SignedData[inner_type]. """
    inner_value = decoder(deserializer)
    script_input = decode_bytes(deserializer)
    # XXX: mypy doesn't recognize a runtime type used as a subscript, but the construction is correct
    return SignedData[inner_type](inner_value, script_input)  # type: ignore[valid-type]
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +r""" +In Python a tuple type can be used in annotations in 2 different ways: + +1. `tuple[A, B, C]`: known fixed length and heterogeneous types +2. `tuple[X, ...]`: variable length and homogeneous type + +This module only implements encoding of the first case, the second case can be encoded using the collection encoder. + +There actually isn't a "format" per-se, the encoding of `tuple[A, B, C]` is just the encoding of A concatenated with B +concatenated with C. So this compound encoder is basically a shortcut that can be used by cases that already have a +tuple of values and a matching tuple of encoders of those values. + +>>> from hathor.serialization.encoding.utf8 import encode_utf8, decode_utf8 +>>> from hathor.serialization.encoding.bool import encode_bool, decode_bool +>>> from hathor.serialization.encoding.bytes import decode_bytes, encode_bytes +>>> se = Serializer.build_bytes_serializer() +>>> values = ('foobar', False, b'test') +>>> encode_tuple(se, values, (encode_utf8, encode_bool, encode_bytes)) +>>> bytes(se.finalize()).hex() +'06666f6f626172000474657374' + +Breakdown of the result: + + 06666f6f626172: 'foobar' + 00: False + 0474657374: b'test' + +>>> de = Deserializer.build_bytes_deserializer(bytes.fromhex('06666f6f626172000474657374')) +>>> decode_tuple(de, (decode_utf8, decode_bool, decode_bytes)) +('foobar', False, b'test') +""" + +from typing import Any + +from typing_extensions import TypeVarTuple, Unpack + +from hathor.serialization import Deserializer, Serializer + +from . 
def encode_tuple(serializer: 'Serializer', values: 'tuple[Unpack[Ts]]', encoders: 'tuple[Encoder[Any], ...]') -> None:
    """ Encode a fixed-length heterogeneous tuple by concatenating each element's encoding.

    `values[i]` is written with `encoders[i]`; there is no length prefix or separator,
    so both sides must agree on the element count and encoders.
    """
    assert len(values) == len(encoders)
    # mypy can't track the element-wise pairing; the length assert above guards it
    for encode_item, item in zip(encoders, values):  # type: ignore
        encode_item(serializer, item)


def decode_tuple(deserializer: 'Deserializer', decoders: 'tuple[Decoder[Any], ...]') -> 'tuple[Unpack[Ts]]':
    """ Decode a fixed-length heterogeneous tuple: one element per decoder, in order. """
    items = [decode_item(deserializer) for decode_item in decoders]
    return tuple(items)
class Deserializer(ABC):
    """ Abstract base for readers that consume a byte stream piece by piece.

    Concrete subclasses implement the byte-level primitives (`is_empty`, `peek_byte`,
    `peek_bytes`, `read_byte`, `read_bytes`, `read_all`); this base class layers
    struct-level helpers and max-bytes wrapping on top of them.
    """

    @staticmethod
    def build_bytes_deserializer(data: 'Buffer') -> 'BytesDeserializer':
        """Create the standard in-memory deserializer over `data`."""
        from .bytes_deserializer import BytesDeserializer
        return BytesDeserializer(data)

    def finalize(self) -> None:
        """Check that all bytes were consumed; the deserializer cannot be used after this.

        The default rejects finalization; subclasses that support it override this.
        """
        raise TypeError('this deserializer does not support finalization')

    @abstractmethod
    def is_empty(self) -> bool:
        """Return True when there is nothing left to read."""
        raise NotImplementedError

    @abstractmethod
    def peek_byte(self) -> int:
        """Read a single byte without consuming it."""
        raise NotImplementedError

    @abstractmethod
    def peek_bytes(self, n: int, *, exact: bool = True) -> 'Buffer':
        """Read n bytes without consuming them."""
        raise NotImplementedError

    def peek_struct(self, format: str) -> tuple[Any, ...]:
        """Unpack `format` from the upcoming bytes without consuming them."""
        needed = struct.calcsize(format)
        return struct.unpack(format, self.peek_bytes(needed))

    @abstractmethod
    def read_byte(self) -> int:
        """Read and consume a single byte as an unsigned int."""
        raise NotImplementedError

    @abstractmethod
    def read_bytes(self, n: int, *, exact: bool = True) -> 'Buffer':
        """Read n bytes; when exact=True it errors if there isn't enough data.

        XXX: the body below is a blanket example implementation of the behavior;
        it has to be explicitly used by subclasses that want it.
        """
        out = bytearray()
        for _ in range(n):
            if not exact and self.is_empty():
                break
            out.append(self.read_byte())
        return bytes(out)

    @abstractmethod
    def read_all(self) -> 'Buffer':
        """Read every remaining byte until the reader is empty.

        XXX: subclasses are expected to specialize this blanket implementation.
        """
        out = bytearray()
        while not self.is_empty():
            out.append(self.read_byte())
        return bytes(out)

    def read_struct(self, format: str) -> tuple[Any, ...]:
        """Consume exactly the bytes `format` needs and unpack them."""
        needed = struct.calcsize(format)
        return struct.unpack_from(format, self.read_bytes(needed))

    def with_max_bytes(self, max_bytes: int) -> 'MaxBytesDeserializer[Self]':
        """Wrap this deserializer so that at most `max_bytes` can be read."""
        from .adapters import MaxBytesDeserializer
        return MaxBytesDeserializer(self, max_bytes)

    @overload
    def with_optional_max_bytes(self, max_bytes: None) -> 'Self':
        ...

    @overload
    def with_optional_max_bytes(self, max_bytes: int) -> 'MaxBytesDeserializer[Self]':
        ...

    def with_optional_max_bytes(self, max_bytes: 'int | None') -> 'Self | MaxBytesDeserializer[Self]':
        """Like `with_max_bytes`, but a None limit returns self unwrapped."""
        return self if max_bytes is None else self.with_max_bytes(max_bytes)
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +This module was made to hold simple encoding implementations. + +Simple in this context means "not compound". For example a fixed-size int encoding can have sized/signed parameters, +but not a have a generic function or type as a parameter. For compound types (optionals, lists, dicts, ...) the encoder +should be in the `encoding_compound` module. + +The general organization should be that each submodule `x` deals with a single type and look like this: + + def encode_x(serializer: Serializer, value: ValueType, ...config params...) -> None: + ... + + def decode_x(deserializer: Deserializer, ...config params...) -> ValueType: + ... + +The "config params" are optional and specific to each encoder. Submodules should not have to take into consideration +how types are mapped to encoders. +""" diff --git a/hathor/serialization/encoding/bool.py b/hathor/serialization/encoding/bool.py new file mode 100644 index 000000000..878cbde03 --- /dev/null +++ b/hathor/serialization/encoding/bool.py @@ -0,0 +1,78 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
def encode_bool(serializer: 'Serializer', value: bool) -> None:
    """ Write a boolean as a single byte: 0x00 for False, 0x01 for True. """
    assert isinstance(value, bool)
    serializer.write_byte(1 if value else 0)


def decode_bool(deserializer: 'Deserializer') -> bool:
    """ Read one byte and map 0x00/0x01 to False/True.

    Any other byte value raises ValueError so that non-canonical encodings are rejected.
    """
    byte = deserializer.read_byte()
    if byte not in (0, 1):
        raise ValueError(f'{bytes([byte])!r} is not a valid boolean')
    return byte == 1
def encode_bytes(serializer: 'Serializer', data: bytes) -> None:
    """ Write `data` prefixed by its length encoded as an unsigned LEB128 integer.

    This module's docstring has more details and examples.
    """
    assert isinstance(data, bytes)
    encode_leb128(serializer, len(data), signed=False)
    serializer.write_bytes(data)


def decode_bytes(deserializer: 'Deserializer') -> bytes:
    """ Read an unsigned LEB128 length prefix, then exactly that many bytes.

    NOTE(review): the decoded length is unbounded here; presumably callers cap it by
    wrapping the deserializer with `with_max_bytes` — confirm.

    This module's docstring has more details and examples.
    """
    length = decode_leb128(deserializer, signed=False)
    return bytes(deserializer.read_bytes(length))
def encode_int(serializer: 'Serializer', number: int, *, length: int, signed: bool) -> None:
    """ Encode an int as `length` big-endian bytes with the given signedness.

    Raises ValueError when the number does not fit in `length` bytes.
    """
    try:
        encoded = number.to_bytes(length, byteorder='big', signed=signed)
    except OverflowError:
        raise ValueError('too big to encode')
    serializer.write_bytes(encoded)


def decode_int(deserializer: 'Deserializer', *, length: int, signed: bool) -> int:
    """ Decode an int from `length` big-endian bytes with the given signedness. """
    return int.from_bytes(deserializer.read_bytes(length), byteorder='big', signed=signed)
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +This module implements LEB128 for signed integers. + +LEB128 or Little Endian Base 128 is a variable-length code compression used to store arbitrarily large +integers in a small number of bytes. LEB128 is used in the DWARF debug file format and the WebAssembly +binary encoding for all integer literals. + +References: +- https://en.wikipedia.org/wiki/LEB128 +- https://dwarfstd.org/doc/DWARF5.pdf +- https://webassembly.github.io/spec/core/binary/values.html#integers + +This module implements LEB128 encoding/decoding using the standard 1-byte block split into 1-bit for continuation and +7-bits for data. The data can be either a signed or unsigned integer. + +>>> se = Serializer.build_bytes_serializer() +>>> se.write_bytes(b'test') # writes 74657374 +>>> encode_leb128(se, 0, signed=True) # writes 00 +>>> encode_leb128(se, 624485, signed=True) # writes e58e26 +>>> encode_leb128(se, -123456, signed=True) # writes c0bb78 +>>> bytes(se.finalize()).hex() +'7465737400e58e26c0bb78' + +>>> data = bytes.fromhex('00 e58e26 c0bb78 74657374') +>>> de = Deserializer.build_bytes_deserializer(data) +>>> decode_leb128(de, signed=True) # reads 00 +0 +>>> decode_leb128(de, signed=True) # reads e58e26 +624485 +>>> decode_leb128(de, signed=True) # reads c0bb78 +-123456 +>>> bytes(de.read_all()) # reads 74657374 +b'test' +>>> de.finalize() +""" + +from hathor.serialization import Deserializer, Serializer + + +def encode_leb128(serializer: Serializer, value: int, *, signed: bool) -> None: + """ Encodes an integer using LEB128. 
+ + Caller must explicitly choose `signed=True` or `signed=False`. + + This module's docstring has more details on LEB128 and examples. + """ + if not signed and value < 0: + raise ValueError('cannot encode value <0 as unsigend') + while True: + byte = value & 0b0111_1111 + value >>= 7 + if signed: + cont = (value == 0 and (byte & 0b0100_0000) == 0) or (value == -1 and (byte & 0b0100_0000) != 0) + else: + cont = (value == 0 and (byte & 0b1000_0000) == 0) + if cont: + serializer.write_byte(byte) + break + serializer.write_byte(byte | 0b1000_0000) + + +def decode_leb128(deserializer: Deserializer, *, signed: bool) -> int: + """ Decodes a LEB128-encoded integer. + + Caller must explicitly choose `signed=True` or `signed=False`. + + This module's docstring has more details on LEB128 and examples. + """ + result = 0 + shift = 0 + while True: + byte = deserializer.read_byte() + result |= (byte & 0b0111_1111) << shift + shift += 7 + assert shift % 7 == 0 + if (byte & 0b1000_0000) == 0: + if signed and (byte & 0b0100_0000) != 0: + return result | -(1 << shift) + return result diff --git a/hathor/serialization/encoding/output_value.py b/hathor/serialization/encoding/output_value.py new file mode 100644 index 000000000..fab67de4e --- /dev/null +++ b/hathor/serialization/encoding/output_value.py @@ -0,0 +1,127 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +r""" +This module implements our custom output-value encoding for integers. 
MAX_OUTPUT_VALUE_32 = 2 ** 31 - 1  # max value (inclusive) that still fits the 4-byte form: 2_147_483_647
MAX_OUTPUT_VALUE_64 = 2 ** 63  # max value (inclusive) encodable at all (8-byte form): 9_223_372_036_854_775_808


def encode_output_value(serializer: 'Serializer', number: int, *, strict: bool = True) -> None:
    """ Encode an output value as either 4 or 8 bytes.

    Values up to 2**31 - 1 use 4 bytes (signed positive); larger values are stored
    negated in 8 bytes so the decoder can tell the width from the sign of the first byte.
    With strict=True (the default) zero is rejected.

    This module's docstring has more details and examples.
    """
    assert isinstance(number, int)
    if number < 0:
        raise ValueError('Number must not be negative')
    if strict and number == 0:
        raise ValueError('Number must be strictly positive')
    if number > MAX_OUTPUT_VALUE_64:
        raise ValueError(f'Number is too big; max possible value is 2**63, got: {number}')
    if number <= MAX_OUTPUT_VALUE_32:
        # XXX: `signed` makes no difference for the 4-byte positive case, but oh well
        serializer.write_bytes(number.to_bytes(4, byteorder='big', signed=True))
    else:
        serializer.write_bytes((-number).to_bytes(8, byteorder='big', signed=True))
+ """ + value_high_byte, = deserializer.peek_struct('!b') + try: + if value_high_byte < 0: + raw_value, = deserializer.read_struct('!q') + value = -raw_value + else: + value, = deserializer.read_struct('!i') + except struct.error as e: + raise BadDataError('Invalid byte struct for output') from e + assert value >= 0 + if strict and value == 0: + raise ValueError('Number must be strictly positive') + if value < MAX_OUTPUT_VALUE_32 and value_high_byte < 0: + raise ValueError('Value fits in 4 bytes but is using 8 bytes') + return value diff --git a/hathor/serialization/encoding/utf8.py b/hathor/serialization/encoding/utf8.py new file mode 100644 index 000000000..d30c1e506 --- /dev/null +++ b/hathor/serialization/encoding/utf8.py @@ -0,0 +1,58 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +r""" +This module implements utf-8 string encoding with a length prefix. + +It works exactly like bytes-encoding but the encoded byte-sequence is utf-8 and it takes/returns a `str`. 
def encode_utf8(serializer: 'Serializer', value: str) -> None:
    """ UTF-8 encode `value` and write it as a length-prefixed byte sequence.

    This module's docstring has more details and examples.
    """
    assert isinstance(value, str)
    encode_bytes(serializer, value.encode('utf-8'))


def decode_utf8(deserializer: 'Deserializer') -> str:
    """ Read a length-prefixed byte sequence and decode it as UTF-8.

    This module's docstring has more details and examples.
    """
    return decode_bytes(deserializer).decode('utf-8')
class SerializationError(HathorError):
    """Base class for every error raised by the serialization package."""


class UnsupportedTypeError(SerializationError):
    """Raised for values whose type has no known encoding."""


class TooLongError(SerializationError):
    """Raised when an encoded item exceeds a configured size limit."""


class OutOfDataError(SerializationError, struct.error):
    """Raised when the input runs out of bytes; also a `struct.error` so struct-based callers can catch it."""


class BadDataError(SerializationError):
    """Raised when bytes are present but do not form a valid encoded value."""
class Serializer(ABC):
    """ Abstract base for writers that produce a byte stream piece by piece.

    Concrete subclasses implement the byte-level primitives (`cur_pos`, `write_byte`,
    `write_bytes`); this base class layers struct-level helpers and max-bytes
    wrapping on top of them.
    """

    @staticmethod
    def build_bytes_serializer() -> 'BytesSerializer':
        """Create the standard in-memory serializer."""
        from .bytes_serializer import BytesSerializer
        return BytesSerializer()

    def finalize(self) -> 'Buffer':
        """Get the resulting byte sequence; the serializer cannot be reused after this.

        The default rejects finalization; subclasses that support it override this.
        """
        raise TypeError('this serializer does not support finalization')

    @abstractmethod
    def cur_pos(self) -> int:
        # NOTE(review): presumably the number of bytes written so far — confirm against BytesSerializer
        raise NotImplementedError

    @abstractmethod
    def write_byte(self, data: int) -> None:
        """Write a single byte."""
        raise NotImplementedError

    @abstractmethod
    def write_bytes(self, data: 'Buffer') -> None:
        """Write a byte sequence.

        XXX: the body below is a blanket example implementation; subclasses are
        expected to specialize it.
        """
        for value in bytes(memoryview(data)):
            self.write_byte(value)

    def write_struct(self, data: tuple[Any, ...], format: str) -> None:
        """Pack `data` with struct `format` and write the resulting bytes."""
        self.write_bytes(struct.pack(format, *data))

    def with_max_bytes(self, max_bytes: int) -> 'MaxBytesSerializer[Self]':
        """Wrap this serializer so that at most `max_bytes` can be written."""
        from .adapters import MaxBytesSerializer
        return MaxBytesSerializer(self, max_bytes)

    @overload
    def with_optional_max_bytes(self, max_bytes: None) -> 'Self':
        ...

    @overload
    def with_optional_max_bytes(self, max_bytes: int) -> 'MaxBytesSerializer[Self]':
        ...

    def with_optional_max_bytes(self, max_bytes: 'int | None') -> 'Self | MaxBytesSerializer[Self]':
        """Like `with_max_bytes`, but a None limit returns self unwrapped."""
        return self if max_bytes is None else self.with_max_bytes(max_bytes)
+ +from typing import TypeAlias + +Buffer: TypeAlias = bytes | memoryview diff --git a/hathor/simulator/miner/geometric_miner.py b/hathor/simulator/miner/geometric_miner.py index a7828e015..f96d3fb84 100644 --- a/hathor/simulator/miner/geometric_miner.py +++ b/hathor/simulator/miner/geometric_miner.py @@ -91,7 +91,7 @@ def _schedule_next_block(self): self._block.nonce = self._rng.getrandbits(32) self._block.update_hash() self.log.debug('randomized step: found new block', hash=self._block.hash_hex, nonce=self._block.nonce) - self._manager.propagate_tx(self._block, fails_silently=False) + self._manager.propagate_tx(self._block) self._blocks_found += 1 self._blocks_before_pause -= 1 self._block = None diff --git a/hathor/simulator/patches.py b/hathor/simulator/patches.py index 95e9d4ebf..fc6b124bd 100644 --- a/hathor/simulator/patches.py +++ b/hathor/simulator/patches.py @@ -15,6 +15,7 @@ from typing import Optional from structlog import get_logger +from typing_extensions import override from hathor.mining.cpu_mining_service import CpuMiningService from hathor.transaction import BaseTransaction @@ -30,7 +31,13 @@ def verify_pow(cls, vertex: BaseTransaction, *, override_weight: Optional[float] class SimulatorCpuMiningService(CpuMiningService): - def resolve(self, vertex: BaseTransaction, *, update_time: bool = False) -> bool: + @override + def resolve( + self, + vertex: BaseTransaction, + *, + update_time: bool = False, + ) -> bool: vertex.update_hash() logger.new().debug('Skipping CpuMiningService.resolve() for simulator') return True diff --git a/hathor/simulator/simulator.py b/hathor/simulator/simulator.py index c776b7da7..18b555d96 100644 --- a/hathor/simulator/simulator.py +++ b/hathor/simulator/simulator.py @@ -81,9 +81,7 @@ def get_default_builder(self) -> Builder: return Builder() \ .set_peer(PrivatePeer.auto_generated()) \ .set_soft_voided_tx_ids(set()) \ - .enable_full_verification() \ .enable_sync_v2() \ - .use_memory() \ .set_settings(self.settings) def 
create_peer(self, builder: Optional[Builder] = None) -> HathorManager: @@ -250,7 +248,7 @@ def _build_vertex_verifiers( """ return VertexVerifiers.create( settings=settings, - vertex_verifier=SimulatorVertexVerifier(settings=settings), + vertex_verifier=SimulatorVertexVerifier(settings=settings, feature_service=feature_service), daa=daa, feature_service=feature_service, ) diff --git a/hathor/simulator/tx_generator.py b/hathor/simulator/tx_generator.py index ead648da5..ce6ff1037 100644 --- a/hathor/simulator/tx_generator.py +++ b/hathor/simulator/tx_generator.py @@ -94,7 +94,7 @@ def schedule_next_transaction(self): """ Schedule the generation of a new transaction. """ if self.tx: - ret = self.manager.propagate_tx(self.tx, fails_silently=False) + ret = self.manager.propagate_tx(self.tx) assert ret is True self.transactions_found += 1 self.latest_transactions.appendleft(self.tx.hash) diff --git a/hathor/simulator/utils.py b/hathor/simulator/utils.py index 792380ae2..61562da50 100644 --- a/hathor/simulator/utils.py +++ b/hathor/simulator/utils.py @@ -109,7 +109,7 @@ def add_new_block( block.signal_bits = signal_bits manager.cpu_mining_service.resolve(block) if propagate: - manager.propagate_tx(block, fails_silently=False) + manager.propagate_tx(block) if advance_clock: assert hasattr(manager.reactor, 'advance') manager.reactor.advance(advance_clock) diff --git a/hathor/storage/rocksdb_storage.py b/hathor/storage/rocksdb_storage.py index 232a2ec71..a92742c0b 100644 --- a/hathor/storage/rocksdb_storage.py +++ b/hathor/storage/rocksdb_storage.py @@ -12,13 +12,14 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import os -from typing import TYPE_CHECKING, Optional +from __future__ import annotations -if TYPE_CHECKING: - import rocksdb +import os +import tempfile +import rocksdb from structlog import get_logger +from typing_extensions import assert_never logger = get_logger() _DB_NAME = 'data_v2.db' @@ -28,12 +29,16 @@ class RocksDBStorage: """ Creates a RocksDB database Give clients the option to create column families """ - def __init__(self, path: str = './', cache_capacity: Optional[int] = None): - import rocksdb + def __init__( + self, + path: str | tempfile.TemporaryDirectory, + cache_capacity: int | None = None, + ) -> None: self.log = logger.new() - self._path = path + # We have to keep a reference to the TemporaryDirectory because it is cleaned up when garbage collected. + self.path, self.temp_dir = self._get_path_and_temp_dir(path) - db_path = os.path.join(path, _DB_NAME) + db_path = os.path.join(self.path, _DB_NAME) lru_cache = cache_capacity and rocksdb.LRUCache(cache_capacity) table_factory = rocksdb.BlockBasedTableFactory(block_cache=lru_cache) options = rocksdb.Options( @@ -58,13 +63,30 @@ def __init__(self, path: str = './', cache_capacity: Optional[int] = None): # finally, open the database self._db = rocksdb.DB(db_path, options, column_families=column_families) + self.log.info('starting rocksdb', path=self.path) self.log.debug('open db', cf_list=[cf.name.decode('ascii') for cf in self._db.column_families]) - def get_db(self) -> 'rocksdb.DB': + @staticmethod + def create_temp(cache_capacity: int | None = None) -> RocksDBStorage: + """Create a RocksDBStorage instance with a temporary directory.""" + return RocksDBStorage(path=tempfile.TemporaryDirectory(), cache_capacity=cache_capacity) + + @staticmethod + def _get_path_and_temp_dir( + path: str | tempfile.TemporaryDirectory, + ) -> tuple[str, tempfile.TemporaryDirectory | None]: + match path: + case str(): + return path, None + case tempfile.TemporaryDirectory(): + return path.name, path + case _: + 
assert_never(path) + + def get_db(self) -> rocksdb.DB: return self._db def get_or_create_column_family(self, cf_name: bytes) -> 'rocksdb.ColumnFamilyHandle': - import rocksdb cf = self._db.get_column_family(cf_name) if cf is None: cf = self._db.create_column_family(cf_name, rocksdb.ColumnFamilyOptions()) diff --git a/hathor/stratum/stratum.py b/hathor/stratum/stratum.py index a03f05271..2b85f9919 100644 --- a/hathor/stratum/stratum.py +++ b/hathor/stratum/stratum.py @@ -36,6 +36,7 @@ from hathor.conf.get_settings import get_global_settings from hathor.crypto.util import decode_address from hathor.exception import InvalidNewTransaction +from hathor.feature_activation.feature_service import FeatureService from hathor.p2p.utils import format_address from hathor.pubsub import EventArguments, HathorEvents from hathor.reactor import ReactorProtocol as Reactor @@ -515,7 +516,7 @@ def handle_submit(self, params: dict, msgid: Optional[str]) -> None: }) tx = job.tx.clone() - block_base = tx.get_header_without_nonce() + block_base = tx.get_mining_header_without_nonce() block_base_hash = sha256d_hash(block_base) # Stratum sends the nonce as a big-endian hexadecimal string. 
if params.get('aux_pow'): @@ -528,7 +529,8 @@ def handle_submit(self, params: dict, msgid: Optional[str]) -> None: self.log.debug('share received', block=tx, block_base=block_base.hex(), block_base_hash=block_base_hash.hex()) - verifier = VertexVerifier(settings=self._settings) + feature_service = FeatureService(settings=self._settings, tx_storage=self.manager.tx_storage) + verifier = VertexVerifier(settings=self._settings, feature_service=feature_service) try: verifier.verify_pow(tx, override_weight=job.weight) @@ -560,7 +562,7 @@ def handle_submit(self, params: dict, msgid: Optional[str]) -> None: # We only propagate blocks here in stratum # For tx we need to propagate in the resource, # so we can get the possible errors - self.manager.submit_block(tx, fails_silently=False) + self.manager.submit_block(tx) self.blocks_found += 1 except (InvalidNewTransaction, TxValidationError) as e: # Block propagation failed, but the share was succesfully submited @@ -600,7 +602,7 @@ def job_request(self) -> None: else: if job: job_data = { - 'data': job.tx.get_header_without_nonce().hex(), + 'data': job.tx.get_mining_header_without_nonce().hex(), 'job_id': job.id.hex, 'nonce_size': job.tx.SERIALIZATION_NONCE_SIZE, 'weight': float(job.weight), diff --git a/hathor/transaction/base_transaction.py b/hathor/transaction/base_transaction.py index a6a7b85ac..b75c672c3 100644 --- a/hathor/transaction/base_transaction.py +++ b/hathor/transaction/base_transaction.py @@ -23,17 +23,26 @@ from enum import IntEnum from itertools import chain from math import isfinite, log -from struct import error as StructError, pack +from struct import pack from typing import TYPE_CHECKING, Any, ClassVar, Generic, Iterator, Optional, TypeAlias, TypeVar from structlog import get_logger +from typing_extensions import Self from hathor.checkpoint import Checkpoint from hathor.conf.get_settings import get_global_settings from hathor.transaction.exceptions import InvalidOutputValue, WeightError +from 
hathor.transaction.headers import VertexBaseHeader from hathor.transaction.static_metadata import VertexStaticMetadata from hathor.transaction.transaction_metadata import TransactionMetadata -from hathor.transaction.util import VerboseCallback, int_to_bytes, unpack, unpack_len +from hathor.transaction.util import ( + VerboseCallback, + bytes_to_output_value, + int_to_bytes, + output_value_to_bytes, + unpack, + unpack_len, +) from hathor.transaction.validation_state import ValidationState from hathor.types import TokenUid, TxOutputScript, VertexId from hathor.util import classproperty @@ -48,7 +57,6 @@ logger = get_logger() MAX_OUTPUT_VALUE = 2**63 # max value (inclusive) that is possible to encode: 9223372036854775808 ~= 9.22337e+18 -_MAX_OUTPUT_VALUE_32 = 2**31 - 1 # max value (inclusive) before having to use 8 bytes: 2147483647 ~= 2.14748e+09 TX_HASH_SIZE = 32 # 256 bits, 32 bytes @@ -91,7 +99,9 @@ class TxVersion(IntEnum): REGULAR_TRANSACTION = 1 TOKEN_CREATION_TRANSACTION = 2 MERGE_MINED_BLOCK = 3 + # DEPRECATED_NANO_CONTRACT = 4 # XXX: Temporary to keep compatibility POA_BLOCK = 5 + ON_CHAIN_BLUEPRINT = 6 @classmethod def _missing_(cls, value: Any) -> None: @@ -115,6 +125,11 @@ def get_cls(self) -> type['BaseTransaction']: TxVersion.POA_BLOCK: PoaBlock } + settings = get_global_settings() + if settings.ENABLE_NANO_CONTRACTS: + from hathor.nanocontracts.on_chain_blueprint import OnChainBlueprint + cls_map[TxVersion.ON_CHAIN_BLUEPRINT] = OnChainBlueprint + cls = cls_map.get(self) if cls is None: @@ -131,6 +146,10 @@ def get_cls(self) -> type['BaseTransaction']: class GenericVertex(ABC, Generic[StaticMetadataT]): """Hathor generic vertex""" + __slots__ = ['version', 'signal_bits', 'weight', 'timestamp', 'nonce', 'inputs', 'outputs', 'parents', '_hash', + 'storage', '_settings', '_metadata', '_static_metadata', 'headers', 'name', 'MAX_NUM_INPUTS', + 'MAX_NUM_OUTPUTS', '__weakref__'] + # Even though nonce is serialized with different sizes for tx and blocks # the 
same size is used for hashes to enable mining algorithm compatibility SERIALIZATION_NONCE_SIZE: ClassVar[int] @@ -185,6 +204,14 @@ def __init__( self._hash: VertexId | None = hash # Stored as bytes. self._static_metadata = None + self.headers: list[VertexBaseHeader] = [] + + # A name solely for debugging purposes. + self.name: str | None = None + + self.MAX_NUM_INPUTS = self._settings.MAX_NUM_INPUTS + self.MAX_NUM_OUTPUTS = self._settings.MAX_NUM_OUTPUTS + @classproperty def log(cls): """ This is a workaround because of a bug on structlog (or abc). @@ -231,6 +258,10 @@ def is_block(self) -> bool: def is_transaction(self) -> bool: raise NotImplementedError + def is_nano_contract(self) -> bool: + """Return True if this transaction is a nano contract or not.""" + return False + def get_fields_from_struct(self, struct_bytes: bytes, *, verbose: VerboseCallback = None) -> bytes: """ Gets all common fields for a Transaction and a Block from a buffer. @@ -246,10 +277,26 @@ def get_fields_from_struct(self, struct_bytes: bytes, *, verbose: VerboseCallbac buf = self.get_graph_fields_from_struct(buf, verbose=verbose) return buf + def get_header_from_bytes(self, buf: bytes, *, verbose: VerboseCallback = None) -> bytes: + """Parse bytes and return the next header in buffer.""" + from hathor.transaction.vertex_parser import VertexParser + + if len(self.headers) >= self.get_maximum_number_of_headers(): + raise ValueError('too many headers') + header_type = buf[:1] + header_class = VertexParser.get_header_parser(header_type, self._settings) + header, buf = header_class.deserialize(self, buf) + self.headers.append(header) + return buf + + def get_maximum_number_of_headers(self) -> int: + """Return the maximum number of headers for this vertex.""" + return 1 + @classmethod @abstractmethod def create_from_struct(cls, struct_bytes: bytes, storage: Optional['TransactionStorage'] = None, - *, verbose: VerboseCallback = None) -> 'BaseTransaction': + *, verbose: VerboseCallback = None) -> 
Self: """ Create a transaction from its bytes. :param struct_bytes: Bytes of a serialized transaction @@ -396,6 +443,10 @@ def get_graph_struct(self) -> bytes: struct_bytes += parent return struct_bytes + def get_headers_struct(self) -> bytes: + """Return the serialization of the headers only.""" + return b''.join(h.serialize() for h in self.headers) + def get_struct_without_nonce(self) -> bytes: """Return a partial serialization of the transaction, without including the nonce field @@ -423,17 +474,13 @@ def get_struct(self) -> bytes: """ struct_bytes = self.get_struct_without_nonce() struct_bytes += self.get_struct_nonce() + struct_bytes += self.get_headers_struct() return struct_bytes def get_all_dependencies(self) -> set[bytes]: """Set of all tx-hashes needed to fully validate this tx, including parent blocks/txs and inputs.""" return set(chain(self.parents, (i.tx_id for i in self.inputs))) - def get_tx_dependencies(self) -> set[bytes]: - """Set of all tx-hashes needed to fully validate this, except for block parent, i.e. 
only tx parents/inputs.""" - parents = self.parents[1:] if self.is_block else self.parents - return set(chain(parents, (i.tx_id for i in self.inputs))) - def get_tx_parents(self) -> set[bytes]: """Set of parent tx hashes, typically used for syncing transactions.""" return set(self.parents[1:] if self.is_block else self.parents) @@ -520,23 +567,26 @@ def get_funds_hash(self) -> bytes: funds_hash.update(self.get_funds_struct()) return funds_hash.digest() - def get_graph_hash(self) -> bytes: - """Return the sha256 of the graph part of the transaction + def get_graph_and_headers_hash(self) -> bytes: + """Return the sha256 of the graph part of the transaction + its headers - :return: the hash of the funds data + :return: the hash of the graph and headers data :rtype: bytes """ - graph_hash = hashlib.sha256() - graph_hash.update(self.get_graph_struct()) - return graph_hash.digest() + h = hashlib.sha256() + h.update(self.get_graph_struct()) + h.update(self.get_headers_struct()) + return h.digest() - def get_header_without_nonce(self) -> bytes: + def get_mining_header_without_nonce(self) -> bytes: """Return the transaction header without the nonce :return: transaction header without the nonce :rtype: bytes """ - return self.get_funds_hash() + self.get_graph_hash() + data = self.get_funds_hash() + self.get_graph_and_headers_hash() + assert len(data) == 64, 'the mining data should have a fixed size of 64 bytes' + return data def calculate_hash1(self) -> 'HASH': """Return the sha256 of the transaction without including the `nonce` @@ -545,7 +595,7 @@ def calculate_hash1(self) -> 'HASH': :rtype: :py:class:`_hashlib.HASH` """ calculate_hash1 = hashlib.sha256() - calculate_hash1.update(self.get_header_without_nonce()) + calculate_hash1.update(self.get_mining_header_without_nonce()) return calculate_hash1 def calculate_hash2(self, part1: 'HASH') -> bytes: @@ -816,19 +866,20 @@ def serialize_output(tx: BaseTransaction, tx_out: TxOutput) -> dict[str, Any]: return ret - def 
clone(self, *, include_metadata: bool = True, include_storage: bool = True) -> 'BaseTransaction': + def clone(self, *, include_metadata: bool = True, include_storage: bool = True) -> Self: """Return exact copy without sharing memory, including metadata if loaded. :return: Transaction or Block copy """ - new_tx = self.create_from_struct(self.get_struct()) + new_tx = self.create_from_struct( + self.get_struct(), + storage=self.storage if include_storage else None, + ) # static_metadata can be safely copied as it is a frozen dataclass new_tx.set_static_metadata(self._static_metadata) if hasattr(self, '_metadata') and include_metadata: assert self._metadata is not None # FIXME: is this actually true or do we have to check if not None new_tx._metadata = self._metadata.clone() - if include_storage: - new_tx.storage = self.storage return new_tx @abstractmethod @@ -1080,32 +1131,3 @@ def to_json(self, *, decode_script: bool = False) -> dict[str, Any]: if decode_script: data['decoded'] = self.to_human_readable() return data - - -def bytes_to_output_value(buf: bytes) -> tuple[int, bytes]: - (value_high_byte,), _ = unpack('!b', buf) - if value_high_byte < 0: - output_struct = '!q' - value_sign = -1 - else: - output_struct = '!i' - value_sign = 1 - try: - (signed_value,), buf = unpack(output_struct, buf) - except StructError as e: - raise InvalidOutputValue('Invalid byte struct for output') from e - value = signed_value * value_sign - assert value >= 0 - if value < _MAX_OUTPUT_VALUE_32 and value_high_byte < 0: - raise ValueError('Value fits in 4 bytes but is using 8 bytes') - return value, buf - - -def output_value_to_bytes(number: int) -> bytes: - if number <= 0: - raise InvalidOutputValue('Invalid value for output') - - if number > _MAX_OUTPUT_VALUE_32: - return (-number).to_bytes(8, byteorder='big', signed=True) - else: - return number.to_bytes(4, byteorder='big', signed=True) # `signed` makes no difference, but oh well diff --git a/hathor/transaction/block.py 
b/hathor/transaction/block.py index 9f5f5a06d..4fd77e4c1 100644 --- a/hathor/transaction/block.py +++ b/hathor/transaction/block.py @@ -23,7 +23,7 @@ from hathor.checkpoint import Checkpoint from hathor.feature_activation.feature import Feature from hathor.feature_activation.model.feature_state import FeatureState -from hathor.transaction import BaseTransaction, TxOutput, TxVersion +from hathor.transaction import TxOutput, TxVersion from hathor.transaction.base_transaction import GenericVertex from hathor.transaction.exceptions import CheckpointError from hathor.transaction.static_metadata import BlockStaticMetadata @@ -32,6 +32,7 @@ if TYPE_CHECKING: from hathor.conf.settings import HathorSettings + from hathor.transaction import Transaction from hathor.transaction.storage import TransactionStorage # noqa: F401 # Signal bits (B), version (B), outputs len (B) @@ -94,10 +95,15 @@ def create_from_struct(cls, struct_bytes: bytes, storage: Optional['TransactionS blc = cls() buf = blc.get_fields_from_struct(struct_bytes, verbose=verbose) - blc.nonce = int.from_bytes(buf, byteorder='big') - if len(buf) != cls.SERIALIZATION_NONCE_SIZE: + if len(buf) < cls.SERIALIZATION_NONCE_SIZE: raise ValueError('Invalid sequence of bytes') + blc.nonce = int.from_bytes(buf[:cls.SERIALIZATION_NONCE_SIZE], byteorder='big') + buf = buf[cls.SERIALIZATION_NONCE_SIZE:] + + while buf: + buf = blc.get_header_from_bytes(buf, verbose=verbose) + blc.hash = blc.calculate_hash() blc.storage = storage @@ -290,9 +296,9 @@ def verify_checkpoint(self, checkpoints: list[Checkpoint]) -> None: # TODO: check whether self is a parent of any checkpoint-valid block, this is left for a future PR pass - def get_base_hash(self) -> bytes: + def get_mining_base_hash(self) -> bytes: from hathor.merged_mining.bitcoin import sha256d_hash - return sha256d_hash(self.get_header_without_nonce()) + return sha256d_hash(self.get_mining_header_without_nonce()) def get_height(self) -> int: """Return this block's height.""" @@ 
-336,18 +342,16 @@ def set_feature_state(self, *, feature: Feature, state: FeatureState, save: bool """ previous_state = self.get_feature_state(feature=feature) - if state == previous_state: - return - - assert previous_state is None - assert self.storage is not None - - metadata = self.get_metadata() - feature_states = metadata.feature_states or {} - feature_states[feature] = state - metadata.feature_states = feature_states + if state != previous_state: + # we are settings the state for the first time in this block + assert previous_state is None + metadata = self.get_metadata() + feature_states = metadata.feature_states or {} + feature_states[feature] = state + metadata.feature_states = feature_states if save: + assert self.storage is not None self.storage.save_transaction(self, only_metadata=True) def get_feature_activation_bit_value(self, bit: int) -> int: @@ -356,8 +360,9 @@ def get_feature_activation_bit_value(self, bit: int) -> int: return bit_list[bit] - def iter_transactions_in_this_block(self) -> Iterator[BaseTransaction]: + def iter_transactions_in_this_block(self) -> Iterator[Transaction]: """Return an iterator of the transactions that have this block as meta.first_block.""" + from hathor.transaction import Transaction from hathor.transaction.storage.traversal import BFSOrderWalk assert self.storage is not None bfs = BFSOrderWalk(self.storage, is_dag_verifications=True, is_dag_funds=True, is_left_to_right=False) @@ -366,6 +371,7 @@ def iter_transactions_in_this_block(self) -> Iterator[BaseTransaction]: if tx_meta.first_block != self.hash: bfs.skip_neighbors(tx) continue + assert isinstance(tx, Transaction) yield tx @override diff --git a/hathor/transaction/exceptions.py b/hathor/transaction/exceptions.py index 2d1bfbda8..2ffebb7a6 100644 --- a/hathor/transaction/exceptions.py +++ b/hathor/transaction/exceptions.py @@ -50,8 +50,8 @@ class InvalidInputDataSize(TxValidationError): """Input data is too big""" -class NoInputError(TxValidationError): - 
"""There is not input""" +class TooFewInputs(TxValidationError): + """There are less inputs than the minimum required""" class InvalidScriptError(TxValidationError): @@ -66,6 +66,14 @@ class TooManyInputs(TxValidationError): """More than 256 inputs""" +class TooManyHeaders(TxValidationError): + """Vertex has more vertex than the maximum allowed.""" + + +class HeaderNotSupported(TxValidationError): + """Vertex contains a header that is not supported by its type.""" + + class InexistentInput(TxValidationError): """Input tx does not exist or index spent does not exist""" diff --git a/hathor/transaction/headers/__init__.py b/hathor/transaction/headers/__init__.py new file mode 100644 index 000000000..029f61078 --- /dev/null +++ b/hathor/transaction/headers/__init__.py @@ -0,0 +1,23 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from hathor.transaction.headers.base import VertexBaseHeader +from hathor.transaction.headers.nano_header import NanoHeader +from hathor.transaction.headers.types import VertexHeaderId + +__all__ = [ + 'VertexBaseHeader', + 'VertexHeaderId', + 'NanoHeader', +] diff --git a/hathor/transaction/headers/base.py b/hathor/transaction/headers/base.py new file mode 100644 index 000000000..aba002ad9 --- /dev/null +++ b/hathor/transaction/headers/base.py @@ -0,0 +1,47 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import annotations + +from abc import ABC, abstractmethod +from typing import TYPE_CHECKING + +from hathor.transaction.util import VerboseCallback + +if TYPE_CHECKING: + from hathor.transaction.base_transaction import BaseTransaction + + +class VertexBaseHeader(ABC): + @classmethod + @abstractmethod + def deserialize( + cls, + tx: BaseTransaction, + buf: bytes, + *, + verbose: VerboseCallback = None + ) -> tuple[VertexBaseHeader, bytes]: + """Deserialize header from `buf` which starts with header id.""" + raise NotImplementedError + + @abstractmethod + def serialize(self) -> bytes: + """Serialize header with header id as prefix.""" + raise NotImplementedError + + @abstractmethod + def get_sighash_bytes(self) -> bytes: + """Return sighash bytes to check digital signatures.""" + raise NotImplementedError diff --git a/hathor/transaction/headers/nano_header.py b/hathor/transaction/headers/nano_header.py new file mode 100644 index 000000000..709df3031 --- /dev/null +++ b/hathor/transaction/headers/nano_header.py @@ -0,0 +1,330 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import annotations + +from collections import deque +from dataclasses import dataclass +from typing import TYPE_CHECKING + +from typing_extensions import assert_never + +from hathor.transaction.headers.base import VertexBaseHeader +from hathor.transaction.headers.types import VertexHeaderId +from hathor.transaction.util import ( + VerboseCallback, + bytes_to_output_value, + int_to_bytes, + output_value_to_bytes, + unpack, + unpack_len, +) +from hathor.types import VertexId +from hathor.utils import leb128 + +if TYPE_CHECKING: + from hathor.nanocontracts.context import Context + from hathor.nanocontracts.types import BlueprintId, ContractId, NCAction, NCActionType, TokenUid + from hathor.transaction import Transaction + from hathor.transaction.base_transaction import BaseTransaction + from hathor.transaction.block import Block + +ADDRESS_LEN_BYTES: int = 25 +ADDRESS_SEQNUM_SIZE: int = 8 # bytes +_NC_SCRIPT_LEN_MAX_BYTES: int = 2 + + +@dataclass(slots=True, kw_only=True, frozen=True) +class NanoHeaderAction: + type: NCActionType + token_index: int + amount: int + + def to_nc_action(self, tx: Transaction) -> NCAction: + """Create a NCAction from this NanoHeaderAction""" + from hathor.nanocontracts.types import ( + NCAcquireAuthorityAction, + NCActionType, + NCDepositAction, + NCGrantAuthorityAction, + NCWithdrawalAction, + TokenUid, + ) + from hathor.transaction.base_transaction import TxOutput + + try: + token_uid = TokenUid(tx.get_token_uid(self.token_index)) + except IndexError: + from hathor.nanocontracts.exception import NCInvalidAction + raise NCInvalidAction(f'{self.type.name} token index {self.token_index} not found') + + match self.type: + case NCActionType.DEPOSIT: + return NCDepositAction(token_uid=token_uid, amount=self.amount) + case NCActionType.WITHDRAWAL: + return NCWithdrawalAction(token_uid=token_uid, amount=self.amount) + case NCActionType.GRANT_AUTHORITY: + mint = self.amount & TxOutput.TOKEN_MINT_MASK > 0 + melt = self.amount & 
TxOutput.TOKEN_MELT_MASK > 0 + self._validate_authorities(token_uid) + return NCGrantAuthorityAction(token_uid=token_uid, mint=mint, melt=melt) + case NCActionType.ACQUIRE_AUTHORITY: + mint = self.amount & TxOutput.TOKEN_MINT_MASK > 0 + melt = self.amount & TxOutput.TOKEN_MELT_MASK > 0 + self._validate_authorities(token_uid) + return NCAcquireAuthorityAction(token_uid=token_uid, mint=mint, melt=melt) + case _: + assert_never(self.type) + + def _validate_authorities(self, token_uid: TokenUid) -> None: + """Check that the authorities in the `amount` are valid.""" + from hathor.transaction.base_transaction import TxOutput + if self.amount > TxOutput.ALL_AUTHORITIES: + from hathor.nanocontracts.exception import NCInvalidAction + raise NCInvalidAction( + f'action {self.type.name} token {token_uid.hex()} invalid authorities: 0b{self.amount:b}' + ) + + +@dataclass(slots=True, kw_only=True) +class NanoHeader(VertexBaseHeader): + tx: Transaction + + # Sequence number for the caller. + nc_seqnum: int + + # nc_id equals to the blueprint_id when a Nano Contract is being created. + # nc_id equals to the contract_id when a method is being called. + nc_id: VertexId + + # Name of the method to be called. When creating a new Nano Contract, it must be equal to 'initialize'. + nc_method: str + + # Serialized arguments to nc_method. + nc_args_bytes: bytes + + nc_actions: list[NanoHeaderAction] + + # Address and script with signature(s) of the transaction owner(s)/caller(s). Supports P2PKH and P2SH. 
+ nc_address: bytes + nc_script: bytes + + @classmethod + def _deserialize_action(cls, buf: bytes) -> tuple[NanoHeaderAction, bytes]: + from hathor.nanocontracts.types import NCActionType + type_bytes, buf = buf[:1], buf[1:] + action_type = NCActionType.from_bytes(type_bytes) + (token_index,), buf = unpack('!B', buf) + amount, buf = bytes_to_output_value(buf) + return NanoHeaderAction( + type=action_type, + token_index=token_index, + amount=amount, + ), buf + + @classmethod + def deserialize( + cls, + tx: BaseTransaction, + buf: bytes, + *, + verbose: VerboseCallback = None + ) -> tuple[NanoHeader, bytes]: + from hathor.transaction import Transaction + assert isinstance(tx, Transaction) + buf = memoryview(buf) + + header_id, buf = buf[:1], buf[1:] + if verbose: + verbose('header_id', header_id) + assert header_id == VertexHeaderId.NANO_HEADER.value + + nc_id, buf = unpack_len(32, buf) + if verbose: + verbose('nc_id', nc_id) + nc_seqnum, buf = leb128.decode_unsigned(buf, max_bytes=ADDRESS_SEQNUM_SIZE) + if verbose: + verbose('nc_seqnum', nc_seqnum) + (nc_method_len,), buf = unpack('!B', buf) + if verbose: + verbose('nc_method_len', nc_method_len) + nc_method, buf = unpack_len(nc_method_len, buf) + if verbose: + verbose('nc_method', nc_method) + (nc_args_bytes_len,), buf = unpack('!H', buf) + if verbose: + verbose('nc_args_bytes_len', nc_args_bytes_len) + nc_args_bytes, buf = unpack_len(nc_args_bytes_len, buf) + if verbose: + verbose('nc_args_bytes', nc_args_bytes) + + nc_actions: list[NanoHeaderAction] = [] + (nc_actions_len,), buf = unpack('!B', buf) + if verbose: + verbose('nc_actions_len', nc_actions_len) + for _ in range(nc_actions_len): + action, buf = cls._deserialize_action(buf) + nc_actions.append(action) + + nc_address, buf = unpack_len(ADDRESS_LEN_BYTES, buf) + if verbose: + verbose('nc_address', nc_address) + nc_script_len, buf = leb128.decode_unsigned(buf, max_bytes=_NC_SCRIPT_LEN_MAX_BYTES) + if verbose: + verbose('nc_script_len', nc_script_len) + 
nc_script, buf = unpack_len(nc_script_len, buf) + if verbose: + verbose('nc_script', nc_script) + + decoded_nc_method = nc_method.decode('ascii') + + return cls( + tx=tx, + nc_seqnum=nc_seqnum, + nc_id=nc_id, + nc_method=decoded_nc_method, + nc_args_bytes=nc_args_bytes, + nc_actions=nc_actions, + nc_address=nc_address, + nc_script=nc_script, + ), bytes(buf) + + def _serialize_action(self, action: NanoHeaderAction) -> bytes: + ret = [ + action.type.to_bytes(), + int_to_bytes(action.token_index, 1), + output_value_to_bytes(action.amount), + ] + return b''.join(ret) + + def _serialize_without_header_id(self, *, skip_signature: bool) -> deque[bytes]: + """Serialize the header with the option to skip the signature.""" + encoded_method = self.nc_method.encode('ascii') + + ret: deque[bytes] = deque() + ret.append(self.nc_id) + ret.append(leb128.encode_unsigned(self.nc_seqnum, max_bytes=ADDRESS_SEQNUM_SIZE)) + ret.append(int_to_bytes(len(encoded_method), 1)) + ret.append(encoded_method) + ret.append(int_to_bytes(len(self.nc_args_bytes), 2)) + ret.append(self.nc_args_bytes) + + ret.append(int_to_bytes(len(self.nc_actions), 1)) + for action in self.nc_actions: + ret.append(self._serialize_action(action)) + + ret.append(self.nc_address) + if not skip_signature: + ret.append(leb128.encode_unsigned(len(self.nc_script), max_bytes=_NC_SCRIPT_LEN_MAX_BYTES)) + ret.append(self.nc_script) + else: + ret.append(leb128.encode_unsigned(0, max_bytes=_NC_SCRIPT_LEN_MAX_BYTES)) + return ret + + def serialize(self) -> bytes: + ret = self._serialize_without_header_id(skip_signature=False) + ret.appendleft(VertexHeaderId.NANO_HEADER.value) + return b''.join(ret) + + def get_sighash_bytes(self) -> bytes: + ret = self._serialize_without_header_id(skip_signature=True) + return b''.join(ret) + + def is_creating_a_new_contract(self) -> bool: + """Return true if this transaction is creating a new contract.""" + from hathor.nanocontracts.types import NC_INITIALIZE_METHOD + return self.nc_method == 
NC_INITIALIZE_METHOD + + def get_contract_id(self) -> ContractId: + """Return the contract id.""" + from hathor.nanocontracts.types import NC_INITIALIZE_METHOD, ContractId, VertexId + if self.nc_method == NC_INITIALIZE_METHOD: + return ContractId(VertexId(self.tx.hash)) + return ContractId(VertexId(self.nc_id)) + + def get_blueprint_id(self, block: Block | None = None) -> BlueprintId: + """Return the blueprint id.""" + from hathor.nanocontracts.exception import NanoContractDoesNotExist + from hathor.nanocontracts.types import BlueprintId, ContractId, VertexId as NCVertexId + from hathor.transaction import Transaction + from hathor.transaction.storage.exceptions import TransactionDoesNotExist + assert self.tx.storage is not None + + if self.is_creating_a_new_contract(): + blueprint_id = BlueprintId(NCVertexId(self.nc_id)) + return blueprint_id + + if block is None: + block = self.tx.storage.get_best_block() + + try: + nc_storage = self.tx.storage.get_nc_storage(block, ContractId(NCVertexId(self.nc_id))) + blueprint_id = nc_storage.get_blueprint_id() + return blueprint_id + except NanoContractDoesNotExist: + # If the NC storage doesn't exist, the contract must be created by a tx in the mempool + pass + + try: + nc_creation = self.tx.storage.get_transaction(self.nc_id) + except TransactionDoesNotExist as e: + raise NanoContractDoesNotExist from e + + if not nc_creation.is_nano_contract(): + raise NanoContractDoesNotExist(f'not a nano contract tx: {self.nc_id.hex()}') + + assert isinstance(nc_creation, Transaction) + nano_header = nc_creation.get_nano_header() + + if not nano_header.is_creating_a_new_contract(): + raise NanoContractDoesNotExist(f'not a contract creation tx: {self.nc_id.hex()}') + + # must be in the mempool + nc_creation_meta = nc_creation.get_metadata() + if nc_creation_meta.first_block is not None: + # otherwise, it failed or skipped execution + from hathor.transaction.nc_execution_state import NCExecutionState + assert nc_creation_meta.nc_execution 
in (NCExecutionState.FAILURE, NCExecutionState.SKIPPED) + raise NanoContractDoesNotExist + + blueprint_id = BlueprintId(NCVertexId(nc_creation.get_nano_header().nc_id)) + return blueprint_id + + def get_actions(self) -> list[NCAction]: + """Get a list of NCActions from the header actions.""" + return [header_action.to_nc_action(self.tx) for header_action in self.nc_actions] + + def get_context(self) -> Context: + """Return a context to be used in a method call.""" + action_list = self.get_actions() + + meta = self.tx.get_metadata() + timestamp: int + if meta.first_block is None: + # XXX Which timestamp to use when it is on mempool? + timestamp = self.tx.timestamp + else: + assert self.tx.storage is not None + first_block = self.tx.storage.get_transaction(meta.first_block) + timestamp = first_block.timestamp + + from hathor.nanocontracts.context import Context + from hathor.nanocontracts.types import Address + context = Context( + actions=action_list, + vertex=self.tx, + address=Address(self.nc_address), + timestamp=timestamp, + ) + return context diff --git a/hathor/transaction/headers/types.py b/hathor/transaction/headers/types.py new file mode 100644 index 000000000..c12613ff1 --- /dev/null +++ b/hathor/transaction/headers/types.py @@ -0,0 +1,20 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from enum import Enum, unique + + +@unique +class VertexHeaderId(Enum): + NANO_HEADER = b'\x10' diff --git a/hathor/transaction/merge_mined_block.py b/hathor/transaction/merge_mined_block.py index 863909882..59c25e50b 100644 --- a/hathor/transaction/merge_mined_block.py +++ b/hathor/transaction/merge_mined_block.py @@ -16,6 +16,8 @@ from typing import TYPE_CHECKING, Any, Optional +from typing_extensions import Self + from hathor.transaction.aux_pow import BitcoinAuxPow from hathor.transaction.base_transaction import TxOutput, TxVersion from hathor.transaction.block import Block @@ -67,7 +69,7 @@ def _get_formatted_fields_dict(self, short: bool = True) -> dict[str, str]: @classmethod def create_from_struct(cls, struct_bytes: bytes, storage: Optional['TransactionStorage'] = None, - *, verbose: VerboseCallback = None) -> 'MergeMinedBlock': + *, verbose: VerboseCallback = None) -> Self: blc = cls() buf = blc.get_fields_from_struct(struct_bytes, verbose=verbose) blc.aux_pow = BitcoinAuxPow.from_bytes(buf) @@ -77,7 +79,7 @@ def create_from_struct(cls, struct_bytes: bytes, storage: Optional['TransactionS def calculate_hash(self) -> bytes: assert self.aux_pow is not None - return self.aux_pow.calculate_hash(self.get_base_hash()) + return self.aux_pow.calculate_hash(self.get_mining_base_hash()) def get_struct_nonce(self) -> bytes: if not self.aux_pow: diff --git a/hathor/transaction/nc_execution_state.py b/hathor/transaction/nc_execution_state.py new file mode 100644 index 000000000..8cfd8fa0c --- /dev/null +++ b/hathor/transaction/nc_execution_state.py @@ -0,0 +1,23 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from enum import StrEnum, auto, unique + + +@unique +class NCExecutionState(StrEnum): + PENDING = auto() # aka, not even tried to execute it + SUCCESS = auto() # execution was successful + FAILURE = auto() # execution failed and the transaction is voided + SKIPPED = auto() # execution was skipped, usually because the transaction was voided diff --git a/hathor/transaction/resources/create_tx.py b/hathor/transaction/resources/create_tx.py index 897bd0ead..eda48c0ff 100644 --- a/hathor/transaction/resources/create_tx.py +++ b/hathor/transaction/resources/create_tx.py @@ -89,6 +89,7 @@ def render_POST(self, request): # conservative estimate of the input data size to estimate a valid weight tx_input.data = b'\0' * 107 tx.weight = self.manager.daa.minimum_tx_weight(fake_signed_tx) + tx.init_static_metadata_from_storage(self.manager._settings, self.manager.tx_storage) self._verify_unsigned_skip_pow(tx) if tx.is_double_spending(): @@ -113,8 +114,8 @@ def _verify_unsigned_skip_pow(self, tx: Transaction) -> None: verifiers.vertex.verify_number_of_outputs(tx) verifiers.vertex.verify_outputs(tx) verifiers.tx.verify_output_token_indexes(tx) - verifiers.vertex.verify_sigops_output(tx) - verifiers.tx.verify_sigops_input(tx) + verifiers.vertex.verify_sigops_output(tx, enable_checkdatasig_count=True) + verifiers.tx.verify_sigops_input(tx, enable_checkdatasig_count=True) # need to run verify_inputs first to check if all inputs exist verifiers.tx.verify_inputs(tx, skip_script=True) verifiers.vertex.verify_parents(tx) diff --git a/hathor/transaction/resources/mining.py 
b/hathor/transaction/resources/mining.py index 1cd20bfdc..abe086cbb 100644 --- a/hathor/transaction/resources/mining.py +++ b/hathor/transaction/resources/mining.py @@ -144,7 +144,7 @@ def render_POST(self, request): 'per-ip': [ { 'rate': '1r/s', - 'burst': 1, + 'burst': 3, 'delay': 3, } ] diff --git a/hathor/transaction/scripts/__init__.py b/hathor/transaction/scripts/__init__.py index e7f88f72c..b0ccd00a9 100644 --- a/hathor/transaction/scripts/__init__.py +++ b/hathor/transaction/scripts/__init__.py @@ -13,9 +13,9 @@ # limitations under the License. from hathor.transaction.scripts.construct import ( + SigopCounter, create_base_script, create_output_script, - get_sigops_count, parse_address_script, ) from hathor.transaction.scripts.execute import ScriptExtras, script_eval @@ -32,9 +32,9 @@ 'NanoContractMatchValues', 'HathorScript', 'ScriptExtras', + 'SigopCounter', 'parse_address_script', 'create_base_script', 'create_output_script', 'script_eval', - 'get_sigops_count', ] diff --git a/hathor/transaction/scripts/construct.py b/hathor/transaction/scripts/construct.py index 94eee27b7..077813646 100644 --- a/hathor/transaction/scripts/construct.py +++ b/hathor/transaction/scripts/construct.py @@ -177,8 +177,18 @@ def parse_script_ops(data: bytes) -> Generator[_ScriptOperation, None, None]: yield _ScriptOperation(opcode=op, position=last_pos, data=None) -def count_sigops(data: bytes) -> int: - """ Count number of signature operations on the script +class SigopCounter: + def __init__( + self, + *, + max_multisig_pubkeys: int, + enable_checkdatasig_count: bool, + ) -> None: + self.max_multisig_pubkeys = max_multisig_pubkeys + self.enable_checkdatasig_count = enable_checkdatasig_count + + def count_sigops(self, data: bytes) -> int: + """ Count number of signature operations on the script :param data: script to parse that contains data and opcodes :type data: bytes @@ -190,40 +200,43 @@ def count_sigops(data: bytes) -> int: :return: number of signature operations the 
script would do if it was executed :rtype: int - """ - from hathor.transaction.scripts import Opcode - from hathor.transaction.scripts.execute import decode_opn, get_script_op - settings = get_global_settings() - n_ops: int = 0 - data_len: int = len(data) - pos: int = 0 - last_opcode: Union[int, None] = None - - while pos < data_len: - opcode, pos = get_script_op(pos, data) - - if opcode == Opcode.OP_CHECKSIG: - n_ops += 1 - elif opcode == Opcode.OP_CHECKMULTISIG: - assert isinstance(last_opcode, int) - if Opcode.OP_0 <= last_opcode <= Opcode.OP_16: - # Conventional OP_CHECKMULTISIG: ... ... - # this function will run op_checksig with each pair (sign_x, pubkey_y) until all signatures - # are verified so the worst case scenario is n op_checksig and the best m op_checksig - # we know m <= n, so for now we are counting n operations (the upper limit) - n_ops += decode_opn(last_opcode) - else: - # Unconventional OP_CHECKMULTISIG: - # We count the limit for PUBKEYS, since this is also the upper limit on signature operations - # that any op_checkmultisig would run - n_ops += settings.MAX_MULTISIG_PUBKEYS - last_opcode = opcode - return n_ops - - -def get_sigops_count(data: bytes, output_script: Optional[bytes] = None) -> int: - """ Count number of signature operations on the script, if it's an input script and the spent output is passed - check the spent output for MultiSig and count operations on redeem_script too + """ + from hathor.transaction.scripts import Opcode + from hathor.transaction.scripts.execute import decode_opn, get_script_op + n_ops: int = 0 + data_len: int = len(data) + pos: int = 0 + last_opcode: Union[int, None] = None + + while pos < data_len: + opcode, pos = get_script_op(pos, data) + + match opcode: + case Opcode.OP_CHECKSIG: + n_ops += 1 + case Opcode.OP_CHECKMULTISIG: + if last_opcode is not None and Opcode.OP_0 <= last_opcode <= Opcode.OP_16: + # Conventional OP_CHECKMULTISIG: ... ... 
+ # this function will run op_checksig with each pair (sign_x, pubkey_y) until + # all signatures are verified so the worst case scenario is n op_checksig and the best m + # op_checksig we know m <= n, so for now we are counting n operations (the upper limit) + n_ops += decode_opn(last_opcode) + else: + # Unconventional OP_CHECKMULTISIG: + # We count the limit for PUBKEYS, since this is also the upper limit on signature operations + # that any op_checkmultisig would run + n_ops += self.max_multisig_pubkeys + case Opcode.OP_CHECKDATASIG: + if self.enable_checkdatasig_count: + n_ops += 1 + last_opcode = opcode + return n_ops + + def get_sigops_count(self, data: bytes, output_script: Optional[bytes] = None) -> int: + """ Count number of signature operations on the script. + + If it's an input script and the spent output is passed, check the spent output for MultiSig and count operations + on redeem_script too. :param data: script to parse with opcodes :type data: bytes @@ -236,17 +249,17 @@ def get_sigops_count(data: bytes, output_script: Optional[bytes] = None) -> int: :return: number of signature operations the script would do if it was executed :rtype: int - """ - # If validating an input, should check the spent_tx for MultiSig - if output_script is not None: - # If it's multisig we have to validate the redeem_script sigop count - from hathor.transaction.scripts import MultiSig - if MultiSig.re_match.search(output_script): - multisig_data = MultiSig.get_multisig_data(data) - # input_script + redeem_script - return count_sigops(multisig_data) - - return count_sigops(data) + """ + # If validating an input, should check the spent_tx for MultiSig + if output_script is not None: + # If it's P2SH we have to validate the redeem_script sigop count + from hathor.transaction.scripts import MultiSig + if MultiSig.re_match.search(output_script): + multisig_data = MultiSig.get_multisig_data(data) + # input_script + redeem_script + return self.count_sigops(multisig_data) + + 
return self.count_sigops(data) def get_pushdata(data: bytes) -> bytes: diff --git a/hathor/transaction/scripts/execute.py b/hathor/transaction/scripts/execute.py index 23109afbc..b19ab6c0a 100644 --- a/hathor/transaction/scripts/execute.py +++ b/hathor/transaction/scripts/execute.py @@ -13,14 +13,20 @@ # limitations under the License. import struct +from dataclasses import dataclass from typing import NamedTuple, Optional, Union from hathor.transaction import BaseTransaction, Transaction, TxInput from hathor.transaction.exceptions import DataIndexError, FinalStackInvalid, InvalidScriptError, OutOfData -class ScriptExtras(NamedTuple): +@dataclass(slots=True, frozen=True, kw_only=True) +class ScriptExtras: tx: Transaction + + +@dataclass(slots=True, frozen=True, kw_only=True) +class UtxoScriptExtras(ScriptExtras): txin: TxInput spent_tx: BaseTransaction @@ -103,10 +109,15 @@ def script_eval(tx: Transaction, txin: TxInput, spent_tx: BaseTransaction) -> No :raises ScriptError: if script verification fails """ - input_data = txin.data - output_script = spent_tx.outputs[txin.index].script + raw_script_eval( + input_data=txin.data, + output_script=spent_tx.outputs[txin.index].script, + extras=UtxoScriptExtras(tx=tx, txin=txin, spent_tx=spent_tx), + ) + + +def raw_script_eval(*, input_data: bytes, output_script: bytes, extras: ScriptExtras) -> None: log: list[str] = [] - extras = ScriptExtras(tx=tx, txin=txin, spent_tx=spent_tx) from hathor.transaction.scripts import MultiSig if MultiSig.re_match.search(output_script): @@ -115,12 +126,12 @@ def script_eval(tx: Transaction, txin: TxInput, spent_tx: BaseTransaction) -> No # we can't use input_data + output_script because it will end with an invalid stack # i.e. 
the signatures will still be on the stack after ouput_script is executed redeem_script_pos = MultiSig.get_multisig_redeem_script_pos(input_data) - full_data = txin.data[redeem_script_pos:] + output_script + full_data = input_data[redeem_script_pos:] + output_script execute_eval(full_data, log, extras) # Second, we need to validate that the signatures on the input_data solves the redeem_script # we pop and append the redeem_script to the input_data and execute it - multisig_data = MultiSig.get_multisig_data(extras.txin.data) + multisig_data = MultiSig.get_multisig_data(input_data) execute_eval(multisig_data, log, extras) else: # merge input_data and output_script diff --git a/hathor/transaction/scripts/opcode.py b/hathor/transaction/scripts/opcode.py index 460c66821..eddaecfbb 100644 --- a/hathor/transaction/scripts/opcode.py +++ b/hathor/transaction/scripts/opcode.py @@ -37,7 +37,14 @@ TimeLocked, VerifyFailed, ) -from hathor.transaction.scripts.execute import Stack, binary_to_int, decode_opn, get_data_value, get_script_op +from hathor.transaction.scripts.execute import ( + Stack, + UtxoScriptExtras, + binary_to_int, + decode_opn, + get_data_value, + get_script_op, +) from hathor.transaction.scripts.script_context import ScriptContext @@ -178,6 +185,7 @@ def op_greaterthan_timestamp(context: ScriptContext) -> None: buf = context.stack.pop() assert isinstance(buf, bytes) (timelock,) = struct.unpack('!I', buf) + assert isinstance(context.extras, UtxoScriptExtras) if context.extras.tx.timestamp <= timelock: raise TimeLocked('The output is locked until {}'.format( datetime.datetime.fromtimestamp(timelock).strftime("%m/%d/%Y %I:%M:%S %p"))) @@ -497,6 +505,7 @@ def op_find_p2pkh(context: ScriptContext) -> None: raise MissingStackItems('OP_FIND_P2PKH: empty stack') from hathor.transaction.scripts import P2PKH + assert isinstance(context.extras, UtxoScriptExtras) spent_tx = context.extras.spent_tx txin = context.extras.txin tx = context.extras.tx diff --git 
a/hathor/transaction/static_metadata.py b/hathor/transaction/static_metadata.py index 03855479a..2e61ea17d 100644 --- a/hathor/transaction/static_metadata.py +++ b/hathor/transaction/static_metadata.py @@ -101,7 +101,7 @@ def create( height=height, min_height=min_height, feature_activation_bit_counts=feature_activation_bit_counts, - feature_states={}, # This will be populated in a future PR + feature_states={}, # This will be populated in a future PR, it's currently still in normal metadata ) @staticmethod diff --git a/hathor/transaction/storage/__init__.py b/hathor/transaction/storage/__init__.py index 4fbdd6ae7..c0a060722 100644 --- a/hathor/transaction/storage/__init__.py +++ b/hathor/transaction/storage/__init__.py @@ -13,18 +13,12 @@ # limitations under the License. from hathor.transaction.storage.cache_storage import TransactionCacheStorage -from hathor.transaction.storage.memory_storage import TransactionMemoryStorage +from hathor.transaction.storage.rocksdb_storage import TransactionRocksDBStorage from hathor.transaction.storage.transaction_storage import TransactionStorage from hathor.transaction.storage.vertex_storage_protocol import VertexStorageProtocol -try: - from hathor.transaction.storage.rocksdb_storage import TransactionRocksDBStorage -except ImportError: - pass - __all__ = [ 'TransactionStorage', - 'TransactionMemoryStorage', 'TransactionCacheStorage', 'TransactionRocksDBStorage', 'VertexStorageProtocol' diff --git a/hathor/transaction/storage/cache_storage.py b/hathor/transaction/storage/cache_storage.py index 965a61179..5a82a42df 100644 --- a/hathor/transaction/storage/cache_storage.py +++ b/hathor/transaction/storage/cache_storage.py @@ -12,13 +12,14 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from __future__ import annotations + from collections import OrderedDict -from typing import Any, Iterator, Optional +from typing import TYPE_CHECKING, Any, Iterator, Optional from twisted.internet import threads from typing_extensions import override -from hathor.conf.settings import HathorSettings from hathor.indexes import IndexesManager from hathor.reactor import ReactorProtocol as Reactor from hathor.transaction import BaseTransaction @@ -26,6 +27,10 @@ from hathor.transaction.storage.transaction_storage import BaseTransactionStorage from hathor.transaction.storage.tx_allow_scope import TxAllowScope +if TYPE_CHECKING: + from hathor.conf.settings import HathorSettings + from hathor.nanocontracts.storage import NCStorageFactory + class TransactionCacheStorage(BaseTransactionStorage): """Caching storage to be used 'on top' of other storages. @@ -41,7 +46,8 @@ def __init__( interval: int = 5, capacity: int = 10000, *, - settings: HathorSettings, + settings: 'HathorSettings', + nc_storage_factory: NCStorageFactory, indexes: Optional[IndexesManager], _clone_if_needed: bool = False, ) -> None: @@ -79,7 +85,7 @@ def __init__( # we need to use only one weakref dict, so we must first initialize super, and then # attribute the same weakref for both. - super().__init__(indexes=indexes, settings=settings) + super().__init__(indexes=indexes, settings=settings, nc_storage_factory=nc_storage_factory) self._tx_weakref = store._tx_weakref # XXX: just to make sure this isn't being used anywhere, setters/getters should be used instead del self._allow_scope diff --git a/hathor/transaction/storage/memory_storage.py b/hathor/transaction/storage/memory_storage.py index 31742d823..e69de29bb 100644 --- a/hathor/transaction/storage/memory_storage.py +++ b/hathor/transaction/storage/memory_storage.py @@ -1,127 +0,0 @@ -# Copyright 2021 Hathor Labs -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from typing import Any, Iterator, Optional, TypeVar - -from typing_extensions import override - -from hathor.conf.settings import HathorSettings -from hathor.indexes import IndexesManager -from hathor.transaction import BaseTransaction -from hathor.transaction.storage.exceptions import TransactionDoesNotExist -from hathor.transaction.storage.migrations import MigrationState -from hathor.transaction.storage.transaction_storage import BaseTransactionStorage -from hathor.transaction.transaction_metadata import TransactionMetadata - -_Clonable = TypeVar('_Clonable', BaseTransaction, TransactionMetadata) - - -class TransactionMemoryStorage(BaseTransactionStorage): - def __init__( - self, - indexes: Optional[IndexesManager] = None, - *, - settings: HathorSettings, - _clone_if_needed: bool = False, - ) -> None: - """ - :param _clone_if_needed: *private parameter*, defaults to True, controls whether to clone - transaction/blocks/metadata when returning those objects. 
- :type _clone_if_needed: bool - """ - self.transactions: dict[bytes, BaseTransaction] = {} - self.metadata: dict[bytes, TransactionMetadata] = {} - # Store custom key/value attributes - self.attributes: dict[str, Any] = {} - self._clone_if_needed = _clone_if_needed - super().__init__(indexes=indexes, settings=settings) - - def _check_and_set_network(self) -> None: - # XXX: does not apply to memory storage, can safely be ignored - pass - - def _check_and_apply_migrations(self): - # XXX: does not apply to memory storage, can safely be ignored - pass - - def _clone(self, x: _Clonable) -> _Clonable: - if self._clone_if_needed: - return x.clone() - else: - return x - - def get_migration_state(self, migration_name: str) -> MigrationState: - # XXX: it will always return COMPLETED, migrations don't apply to memory storage - return MigrationState.COMPLETED - - def set_migration_state(self, migration_name: str, state: MigrationState) -> None: - # XXX: do nothing, migrations have no effect on memory storage - pass - - def remove_transaction(self, tx: BaseTransaction) -> None: - super().remove_transaction(tx) - self.transactions.pop(tx.hash, None) - self.metadata.pop(tx.hash, None) - - def save_transaction(self, tx: 'BaseTransaction', *, only_metadata: bool = False) -> None: - super().save_transaction(tx, only_metadata=only_metadata) - self._save_transaction(tx, only_metadata=only_metadata) - - def _save_transaction(self, tx: BaseTransaction, *, only_metadata: bool = False) -> None: - if not only_metadata: - self.transactions[tx.hash] = self._clone(tx) - meta = getattr(tx, '_metadata', None) - if meta: - self.metadata[tx.hash] = self._clone(meta) - - @override - def _save_static_metadata(self, tx: BaseTransaction) -> None: - # We do not need to explicitly save the static metadata as the tx object already holds it in memory - pass - - def transaction_exists(self, hash_bytes: bytes) -> bool: - return hash_bytes in self.transactions - - def _get_transaction(self, hash_bytes: 
bytes) -> BaseTransaction: - if hash_bytes in self.transactions: - tx = self._clone(self.transactions[hash_bytes]) - if hash_bytes in self.metadata: - tx._metadata = self._clone(self.metadata[hash_bytes]) - assert tx._metadata is not None - assert tx._static_metadata is not None - return tx - else: - raise TransactionDoesNotExist(hash_bytes.hex()) - - def _get_all_transactions(self) -> Iterator[BaseTransaction]: - for tx in self.transactions.values(): - tx = self._clone(tx) - if tx.hash in self.metadata: - tx._metadata = self._clone(self.metadata[tx.hash]) - yield tx - - def _get_local_vertices_count(self) -> int: - return len(self.transactions) - - def is_empty(self) -> bool: - return self._get_local_vertices_count() <= 3 - - def add_value(self, key: str, value: str) -> None: - self.attributes[key] = value - - def remove_value(self, key: str) -> None: - self.attributes.pop(key, None) - - def get_value(self, key: str) -> Optional[str]: - return self.attributes.get(key) diff --git a/hathor/transaction/storage/rocksdb_storage.py b/hathor/transaction/storage/rocksdb_storage.py index 5b97cf741..63c3d4fda 100644 --- a/hathor/transaction/storage/rocksdb_storage.py +++ b/hathor/transaction/storage/rocksdb_storage.py @@ -12,12 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from __future__ import annotations + from typing import TYPE_CHECKING, Iterator, Optional from structlog import get_logger from typing_extensions import override -from hathor.conf.settings import HathorSettings from hathor.indexes import IndexesManager from hathor.storage import RocksDBStorage from hathor.transaction.static_metadata import VertexStaticMetadata @@ -29,6 +30,8 @@ if TYPE_CHECKING: import rocksdb + from hathor.conf.settings import HathorSettings + from hathor.nanocontracts.storage import NCStorageFactory from hathor.transaction import BaseTransaction logger = get_logger() @@ -52,8 +55,9 @@ def __init__( rocksdb_storage: RocksDBStorage, indexes: Optional[IndexesManager] = None, *, - settings: HathorSettings, + settings: 'HathorSettings', vertex_parser: VertexParser, + nc_storage_factory: NCStorageFactory, ) -> None: self._cf_tx = rocksdb_storage.get_or_create_column_family(_CF_NAME_TX) self._cf_meta = rocksdb_storage.get_or_create_column_family(_CF_NAME_META) @@ -64,7 +68,7 @@ def __init__( self._rocksdb_storage = rocksdb_storage self._db = rocksdb_storage.get_db() self.vertex_parser = vertex_parser - super().__init__(indexes=indexes, settings=settings) + super().__init__(indexes=indexes, settings=settings, nc_storage_factory=nc_storage_factory) def _load_from_bytes(self, tx_data: bytes, meta_data: bytes) -> 'BaseTransaction': from hathor.transaction.transaction_metadata import TransactionMetadata diff --git a/hathor/transaction/storage/transaction_storage.py b/hathor/transaction/storage/transaction_storage.py index ab06157f5..e7a9ebe6b 100644 --- a/hathor/transaction/storage/transaction_storage.py +++ b/hathor/transaction/storage/transaction_storage.py @@ -12,18 +12,19 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from __future__ import annotations + import hashlib from abc import ABC, abstractmethod, abstractproperty from collections import deque from contextlib import AbstractContextManager from threading import Lock -from typing import Any, Iterator, NamedTuple, Optional, cast +from typing import TYPE_CHECKING, Any, Iterator, NamedTuple, Optional, cast from weakref import WeakValueDictionary from intervaltree.interval import Interval from structlog import get_logger -from hathor.conf.settings import HathorSettings from hathor.execution_manager import ExecutionManager from hathor.indexes import IndexesManager from hathor.indexes.height_index import HeightInfo @@ -50,6 +51,14 @@ from hathor.types import VertexId from hathor.verification.transaction_verifier import TransactionVerifier +if TYPE_CHECKING: + from hathor.conf.settings import HathorSettings + from hathor.nanocontracts import OnChainBlueprint + from hathor.nanocontracts.blueprint import Blueprint + from hathor.nanocontracts.catalog import NCBlueprintCatalog + from hathor.nanocontracts.storage import NCBlockStorage, NCContractStorage, NCStorageFactory + from hathor.nanocontracts.types import BlueprintId, ContractId + cpu = get_cpu_profiler() # these are the timestamp values to be used when resetting them, 1 is used for the node instead of 0, so it can be @@ -73,15 +82,13 @@ class TransactionStorage(ABC): pubsub: Optional[PubSubManager] indexes: Optional[IndexesManager] _latest_n_height_tips: list[HeightInfo] + nc_catalog: Optional['NCBlueprintCatalog'] = None log = get_logger() # Key storage attribute to save if the network stored is the expected network _network_attribute: str = 'network' - # Key storage attribute to save if the full node is running a full verification - _running_full_verification_attribute: str = 'running_full_verification' - # Key storage attribute to save if the manager is running _manager_running_attribute: str = 'manager_running' @@ -100,8 +107,9 @@ class TransactionStorage(ABC): 
_migrations: list[BaseMigration] - def __init__(self, *, settings: HathorSettings) -> None: + def __init__(self, *, settings: HathorSettings, nc_storage_factory: NCStorageFactory) -> None: self._settings = settings + self._nc_storage_factory = nc_storage_factory # Weakref is used to guarantee that there is only one instance of each transaction in memory. self._tx_weakref: WeakValueDictionary[bytes, BaseTransaction] = WeakValueDictionary() self._tx_weakref_disabled: bool = False @@ -905,22 +913,6 @@ def set_network(self, network: str) -> None: """ return self.add_value(self._network_attribute, network) - def start_full_verification(self) -> None: - """ Save full verification on storage - """ - self.add_value(self._running_full_verification_attribute, '1') - - def finish_full_verification(self) -> None: - """ Remove from storage that the full node is initializing with a full verification - """ - self.remove_value(self._running_full_verification_attribute) - - def is_running_full_verification(self) -> bool: - """ Return if the full node is initializing with a full verification - or was running a full verification and was stopped in the middle - """ - return self.get_value(self._running_full_verification_attribute) == '1' - def start_running_manager(self, execution_manager: ExecutionManager) -> None: """ Save on storage that manager is running """ @@ -1146,6 +1138,85 @@ def partial_vertex_exists(self, vertex_id: VertexId) -> bool: with self.allow_partially_validated_context(): return self.transaction_exists(vertex_id) + def get_nc_block_storage(self, block: Block) -> NCBlockStorage: + """Return a block storage for the given block.""" + return self._nc_storage_factory.get_block_storage_from_block(block) + + def get_nc_storage(self, block: Block, contract_id: ContractId) -> NCContractStorage: + """Return a contract storage with the contract state at a given block.""" + from hathor.nanocontracts.types import ContractId, VertexId as NCVertexId + if not block.is_genesis: + 
block_storage = self._nc_storage_factory.get_block_storage_from_block(block) + else: + block_storage = self._nc_storage_factory.get_empty_block_storage() + + return block_storage.get_contract_storage(ContractId(NCVertexId(contract_id))) + + def _get_blueprint(self, blueprint_id: BlueprintId) -> type[Blueprint] | OnChainBlueprint: + assert self.nc_catalog is not None + + if blueprint_class := self.nc_catalog.get_blueprint_class(blueprint_id): + return blueprint_class + + self.log.debug( + 'blueprint_id not in the catalog, looking for on-chain blueprint', + blueprint_id=blueprint_id.hex() + ) + return self.get_on_chain_blueprint(blueprint_id) + + def get_blueprint_source(self, blueprint_id: BlueprintId) -> str: + """Returns the source code associated with the given blueprint_id. + + The blueprint class could be in the catalog (first search), or it could be the tx_id of an on-chain blueprint. + + A point of difference is that an OCB will have a `__blueprint__ = BlueprintName` line, where a built-in + blueprint will not. + """ + import inspect + + from hathor.nanocontracts import OnChainBlueprint + + blueprint = self._get_blueprint(blueprint_id) + if isinstance(blueprint, OnChainBlueprint): + return self.get_on_chain_blueprint(blueprint_id).code.text + else: + module = inspect.getmodule(blueprint) + assert module is not None + return inspect.getsource(module) + + def get_blueprint_class(self, blueprint_id: BlueprintId) -> type[Blueprint]: + """Returns the blueprint class associated with the given blueprint_id. + + The blueprint class could be in the catalog (first search), or it could be the tx_id of an on-chain blueprint. 
+ """ + from hathor.nanocontracts import OnChainBlueprint + blueprint = self._get_blueprint(blueprint_id) + if isinstance(blueprint, OnChainBlueprint): + return blueprint.get_blueprint_class() + else: + return blueprint + + def get_on_chain_blueprint(self, blueprint_id: BlueprintId) -> OnChainBlueprint: + """Return an on-chain blueprint transaction.""" + from hathor.nanocontracts import OnChainBlueprint + from hathor.nanocontracts.exception import ( + BlueprintDoesNotExist, + OCBBlueprintNotConfirmed, + OCBInvalidBlueprintVertexType, + ) + try: + blueprint_tx = self.get_transaction(blueprint_id) + except TransactionDoesNotExist: + self.log.debug('no transaction with the given id found', blueprint_id=blueprint_id.hex()) + raise BlueprintDoesNotExist(blueprint_id.hex()) + if not isinstance(blueprint_tx, OnChainBlueprint): + raise OCBInvalidBlueprintVertexType(blueprint_id.hex()) + tx_meta = blueprint_tx.get_metadata() + if tx_meta.voided_by or not tx_meta.first_block: + raise OCBBlueprintNotConfirmed(blueprint_id.hex()) + # XXX: maybe use N blocks confirmation, like reward-locks + return blueprint_tx + class BaseTransactionStorage(TransactionStorage): indexes: Optional[IndexesManager] @@ -1156,8 +1227,9 @@ def __init__( pubsub: Optional[Any] = None, *, settings: HathorSettings, + nc_storage_factory: NCStorageFactory, ) -> None: - super().__init__(settings=settings) + super().__init__(settings=settings, nc_storage_factory=nc_storage_factory) # Pubsub is used to publish tx voided and winner but it's optional self.pubsub = pubsub diff --git a/hathor/transaction/token_creation_tx.py b/hathor/transaction/token_creation_tx.py index 629050197..b603d3053 100644 --- a/hathor/transaction/token_creation_tx.py +++ b/hathor/transaction/token_creation_tx.py @@ -12,7 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from struct import error as StructError, pack +from dataclasses import dataclass +from struct import pack from typing import Any, Optional from typing_extensions import override @@ -21,7 +22,7 @@ from hathor.transaction.base_transaction import TxInput, TxOutput, TxVersion from hathor.transaction.storage import TransactionStorage # noqa: F401 from hathor.transaction.transaction import TokenInfo, Transaction -from hathor.transaction.util import VerboseCallback, int_to_bytes, unpack, unpack_len +from hathor.transaction.util import VerboseCallback, decode_string_utf8, int_to_bytes, unpack, unpack_len from hathor.types import TokenUid # Signal bits (B), version (B), inputs len (B), outputs len (B) @@ -35,6 +36,13 @@ TOKEN_INFO_VERSION = 1 +@dataclass(slots=True, frozen=True, kw_only=True) +class TokenDescription: + token_id: bytes + token_name: str + token_symbol: str + + class TokenCreationTransaction(Transaction): def __init__( self, @@ -141,13 +149,13 @@ def get_funds_struct(self) -> bytes: return struct_bytes - def get_sighash_all(self) -> bytes: + def get_sighash_all(self, *, skip_cache: bool = False) -> bytes: """ Returns a serialization of the inputs and outputs without including any other field :return: Serialization of the inputs, outputs and tokens :rtype: bytes """ - if self._sighash_cache: + if not skip_cache and self._sighash_cache: return self._sighash_cache struct_bytes = pack( @@ -169,6 +177,10 @@ def get_sighash_all(self) -> bytes: struct_bytes += b''.join(tx_outputs) struct_bytes += self.serialize_token_info() + + for header in self.headers: + struct_bytes += header.get_sighash_bytes() + self._sighash_cache = struct_bytes return struct_bytes @@ -235,16 +247,10 @@ def _get_token_info_from_inputs(self) -> dict[TokenUid, TokenInfo]: token_dict = super()._get_token_info_from_inputs() # we add the created token's info to token_dict, as the creation tx allows for mint/melt - token_dict[self.hash] = TokenInfo(0, True, True) + token_dict[self.hash] = 
TokenInfo( + amount=0, + can_mint=True, + can_melt=True, + ) return token_dict - - -def decode_string_utf8(encoded: bytes, key: str) -> str: - """ Raises StructError in case it's not a valid utf-8 string - """ - try: - decoded = encoded.decode('utf-8') - return decoded - except UnicodeDecodeError: - raise StructError('{} must be a valid utf-8 string.'.format(key)) diff --git a/hathor/transaction/transaction.py b/hathor/transaction/transaction.py index a51eaeffe..27296ef1f 100644 --- a/hathor/transaction/transaction.py +++ b/hathor/transaction/transaction.py @@ -15,16 +15,19 @@ from __future__ import annotations import hashlib +from dataclasses import dataclass from struct import pack from typing import TYPE_CHECKING, Any, NamedTuple, Optional -from typing_extensions import override +from typing_extensions import Self, override from hathor.checkpoint import Checkpoint +from hathor.crypto.util import get_address_b58_from_bytes from hathor.exception import InvalidNewTransaction from hathor.transaction import TxInput, TxOutput, TxVersion from hathor.transaction.base_transaction import TX_HASH_SIZE, GenericVertex from hathor.transaction.exceptions import InvalidToken +from hathor.transaction.headers import NanoHeader from hathor.transaction.static_metadata import TransactionStaticMetadata from hathor.transaction.util import VerboseCallback, unpack, unpack_len from hathor.types import TokenUid, VertexId @@ -40,11 +43,21 @@ _SIGHASH_ALL_FORMAT_STRING = '!BBBBB' -class TokenInfo(NamedTuple): +@dataclass(slots=True, kw_only=True) +class TokenInfo: amount: int can_mint: bool can_melt: bool + @staticmethod + def get_default() -> TokenInfo: + """Create a default, emtpy token info.""" + return TokenInfo( + amount=0, + can_mint=False, + can_melt=False, + ) + class RewardLockedInfo(NamedTuple): block_hash: VertexId @@ -53,6 +66,8 @@ class RewardLockedInfo(NamedTuple): class Transaction(GenericVertex[TransactionStaticMetadata]): + __slots__ = ['tokens', '_sighash_cache', 
'_sighash_data_cache'] + SERIALIZATION_NONCE_SIZE = 4 def __init__( @@ -91,6 +106,11 @@ def __init__( self._sighash_cache: Optional[bytes] = None self._sighash_data_cache: Optional[bytes] = None + def clear_sighash_cache(self) -> None: + """Clear caches related to sighash calculation.""" + self._sighash_cache = None + self._sighash_data_cache = None + @property def is_block(self) -> bool: """Returns true if this is a block""" @@ -101,21 +121,38 @@ def is_transaction(self) -> bool: """Returns true if this is a transaction""" return True + def is_nano_contract(self) -> bool: + try: + self.get_nano_header() + except ValueError: + return False + else: + return True + + def get_nano_header(self) -> NanoHeader: + """Return the NanoHeader or raise ValueError.""" + for header in self.headers: + if isinstance(header, NanoHeader): + return header + raise ValueError('nano header not found') + @classmethod def create_from_struct(cls, struct_bytes: bytes, storage: Optional['TransactionStorage'] = None, - *, verbose: VerboseCallback = None) -> 'Transaction': - tx = cls() + *, verbose: VerboseCallback = None) -> Self: + tx = cls(storage=storage) buf = tx.get_fields_from_struct(struct_bytes, verbose=verbose) - if len(buf) != cls.SERIALIZATION_NONCE_SIZE: + if len(buf) < cls.SERIALIZATION_NONCE_SIZE: raise ValueError('Invalid sequence of bytes') [tx.nonce, ], buf = unpack('!I', buf) if verbose: verbose('nonce', tx.nonce) + while buf: + buf = tx.get_header_from_bytes(buf, verbose=verbose) + tx.update_hash() - tx.storage = storage return tx @@ -184,7 +221,7 @@ def get_funds_struct(self) -> bytes: return struct_bytes - def get_sighash_all(self) -> bytes: + def get_sighash_all(self, *, skip_cache: bool = False) -> bytes: """Return a serialization of the inputs, outputs and tokens without including any other field :return: Serialization of the inputs, outputs and tokens @@ -193,7 +230,7 @@ def get_sighash_all(self) -> bytes: # This method does not depend on the input itself, however we 
call it for each one to sign it. # For transactions that have many inputs there is a significant decrease on the verify time # when using this cache, so we call this method only once. - if self._sighash_cache: + if not skip_cache and self._sighash_cache: return self._sighash_cache struct_bytes = bytearray( @@ -216,6 +253,9 @@ def get_sighash_all(self) -> bytes: for tx_output in self.outputs: struct_bytes += bytes(tx_output) + for header in self.headers: + struct_bytes += header.get_sighash_bytes() + ret = bytes(struct_bytes) self._sighash_cache = ret return ret @@ -242,11 +282,36 @@ def get_token_uid(self, index: int) -> TokenUid: return self._settings.HATHOR_TOKEN_UID return self.tokens[index - 1] + def get_related_addresses(self) -> set[str]: + ret = super().get_related_addresses() + if self.is_nano_contract(): + nano_header = self.get_nano_header() + ret.add(get_address_b58_from_bytes(nano_header.nc_address)) + return ret + def to_json(self, decode_script: bool = False, include_metadata: bool = False) -> dict[str, Any]: json = super().to_json(decode_script=decode_script, include_metadata=include_metadata) json['tokens'] = [h.hex() for h in self.tokens] + + if self.is_nano_contract(): + nano_header = self.get_nano_header() + json['nc_id'] = nano_header.get_contract_id().hex() + json['nc_seqnum'] = nano_header.nc_seqnum + json['nc_blueprint_id'] = nano_header.get_blueprint_id().hex() + json['nc_method'] = nano_header.nc_method + json['nc_args'] = nano_header.nc_args_bytes.hex() + json['nc_address'] = get_address_b58_from_bytes(nano_header.nc_address) + json['nc_context'] = nano_header.get_context().to_json() + return json + def to_json_extended(self) -> dict[str, Any]: + json_extended = super().to_json_extended() + if self.is_nano_contract(): + json = self.to_json() + return {**json, **json_extended} + return json_extended + def verify_checkpoint(self, checkpoints: list[Checkpoint]) -> None: assert self.storage is not None if self.is_genesis: @@ -264,34 +329,60 @@ 
def get_complete_token_info(self) -> dict[TokenUid, TokenInfo]: Get a complete token info dict, including data from both inputs and outputs. """ token_dict = self._get_token_info_from_inputs() + self._update_token_info_from_nano_actions(token_dict=token_dict) + # This one must be called last so token_dict already contains all tokens in inputs and nano actions. self._update_token_info_from_outputs(token_dict=token_dict) return token_dict + def get_minimum_number_of_inputs(self) -> int: + """Return the minimum number of inputs for this transaction. + This is used by the verification services.""" + if self.is_nano_contract(): + return 0 + return 1 + + def _update_token_info_from_nano_actions(self, *, token_dict: dict[TokenUid, TokenInfo]) -> None: + """Update token_dict with nano actions.""" + if not self.is_nano_contract(): + return + + from hathor.nanocontracts.balance_rules import BalanceRules + nano_header = self.get_nano_header() + + for action in nano_header.get_actions(): + rules = BalanceRules.get_rules(self._settings, action) + rules.verification_rule(token_dict) + def _get_token_info_from_inputs(self) -> dict[TokenUid, TokenInfo]: """Sum up all tokens present in the inputs and their properties (amount, can_mint, can_melt) """ token_dict: dict[TokenUid, TokenInfo] = {} - default_info: TokenInfo = TokenInfo(0, False, False) - # add HTR to token dict due to tx melting tokens: there might be an HTR output without any # input or authority. 
If we don't add it, an error will be raised when iterating through # the outputs of such tx (error: 'no token creation and no inputs for token 00') - token_dict[self._settings.HATHOR_TOKEN_UID] = TokenInfo(0, False, False) + token_dict[self._settings.HATHOR_TOKEN_UID] = TokenInfo.get_default() for tx_input in self.inputs: spent_tx = self.get_spent_tx(tx_input) spent_output = spent_tx.outputs[tx_input.index] token_uid = spent_tx.get_token_uid(spent_output.get_token_index()) - (amount, can_mint, can_melt) = token_dict.get(token_uid, default_info) + token_info = token_dict.get(token_uid, TokenInfo.get_default()) + amount = token_info.amount + can_mint = token_info.can_mint + can_melt = token_info.can_melt if spent_output.is_token_authority(): can_mint = can_mint or spent_output.can_mint_token() can_melt = can_melt or spent_output.can_melt_token() else: amount -= spent_output.value - token_dict[token_uid] = TokenInfo(amount, can_mint, can_melt) + token_dict[token_uid] = TokenInfo( + amount=amount, + can_mint=can_mint, + can_melt=can_melt, + ) return token_dict @@ -308,23 +399,20 @@ def _update_token_info_from_outputs(self, *, token_dict: dict[TokenUid, TokenInf token_info = token_dict.get(token_uid) if token_info is None: raise InvalidToken('no inputs for token {}'.format(token_uid.hex())) + + # for authority outputs, make sure the same capability (mint/melt) was present in the inputs + if tx_output.can_mint_token() and not token_info.can_mint: + raise InvalidToken(f'output at index {index} has mint authority, but no input has it') + if tx_output.can_melt_token() and not token_info.can_melt: + raise InvalidToken(f'output at index {index} has melt authority, but no input has it') + + if tx_output.is_token_authority(): + # make sure we only have authorities that we know of + if tx_output.value > TxOutput.ALL_AUTHORITIES: + raise InvalidToken('Invalid authorities in output (0b{0:b})'.format(tx_output.value)) else: - # for authority outputs, make sure the same capability 
(mint/melt) was present in the inputs - if tx_output.can_mint_token() and not token_info.can_mint: - raise InvalidToken('output has mint authority, but no input has it: {}'.format( - tx_output.to_human_readable())) - if tx_output.can_melt_token() and not token_info.can_melt: - raise InvalidToken('output has melt authority, but no input has it: {}'.format( - tx_output.to_human_readable())) - - if tx_output.is_token_authority(): - # make sure we only have authorities that we know of - if tx_output.value > TxOutput.ALL_AUTHORITIES: - raise InvalidToken('Invalid authorities in output (0b{0:b})'.format(tx_output.value)) - else: - # for regular outputs, just subtract from the total amount - sum_tokens = token_info.amount + tx_output.value - token_dict[token_uid] = TokenInfo(sum_tokens, token_info.can_mint, token_info.can_melt) + # for regular outputs, just subtract from the total amount + token_dict[token_uid].amount = token_info.amount + tx_output.value def is_double_spending(self) -> bool: """ Iterate through inputs to check if they were already spent diff --git a/hathor/transaction/transaction_metadata.py b/hathor/transaction/transaction_metadata.py index fa9d2c977..bdbec9c84 100644 --- a/hathor/transaction/transaction_metadata.py +++ b/hathor/transaction/transaction_metadata.py @@ -20,6 +20,8 @@ from hathor.conf.get_settings import get_global_settings from hathor.feature_activation.feature import Feature from hathor.feature_activation.model.feature_state import FeatureState +from hathor.transaction.nc_execution_state import NCExecutionState +from hathor.transaction.types import MetaNCCallRecord from hathor.transaction.validation_state import ValidationState from hathor.util import json_dumpb, json_loadb, practically_equal from hathor.utils.weight import work_to_weight @@ -46,6 +48,11 @@ class TransactionMetadata: first_block: Optional[bytes] validation: ValidationState + # Used to store the root node id of the contract tree related to this block. 
+ nc_block_root_id: Optional[bytes] + nc_execution: Optional[NCExecutionState] + nc_calls: Optional[list[MetaNCCallRecord]] + # A dict of features in the feature activation process and their respective state. Must only be used by Blocks, # is None otherwise. This is only used for caching, so it can be safely cleared up, as it would be recalculated # when necessary. @@ -63,6 +70,7 @@ def __init__( hash: Optional[bytes] = None, accumulated_weight: int = 0, score: int = 0, + nc_block_root_id: Optional[bytes] = None, settings: HathorSettings | None = None, ) -> None: from hathor.transaction.genesis import is_genesis @@ -71,6 +79,11 @@ def __init__( self.hash = hash self._tx_ref = None + # Nano contract metadata + self.nc_block_root_id = nc_block_root_id + self.nc_execution = None + self.nc_calls = None + # Tx outputs that have been spent. # The key is the output index, while the value is a set of the transactions which spend the output. self.spent_outputs = spent_outputs or defaultdict(list) @@ -176,7 +189,7 @@ def __eq__(self, other: Any) -> bool: return False for field in ['hash', 'conflict_with', 'voided_by', 'received_by', 'children', 'accumulated_weight', 'twins', 'score', 'first_block', 'validation', - 'feature_states']: + 'feature_states', 'nc_block_root_id', 'nc_calls', 'nc_execution']: if (getattr(self, field) or None) != (getattr(other, field) or None): return False @@ -231,6 +244,9 @@ def to_storage_json(self) -> dict[str, Any]: else: data['first_block'] = None data['validation'] = self.validation.name.lower() + data['nc_block_root_id'] = self.nc_block_root_id.hex() if self.nc_block_root_id else None + data['nc_calls'] = [x.to_json() for x in self.nc_calls] if self.nc_calls else None + data['nc_execution'] = self.nc_execution.value if self.nc_execution else None return data def to_json(self) -> dict[str, Any]: @@ -292,6 +308,24 @@ def create_from_json(cls, data: dict[str, Any]) -> 'TransactionMetadata': _val_name = data.get('validation', None) 
meta.validation = ValidationState.from_name(_val_name) if _val_name is not None else ValidationState.INITIAL + nc_block_root_id_raw = data.get('nc_block_root_id') + if nc_block_root_id_raw is not None: + meta.nc_block_root_id = bytes.fromhex(nc_block_root_id_raw) + else: + meta.nc_block_root_id = None + + nc_execution_raw = data.get('nc_execution_raw') + if nc_execution_raw is not None: + meta.nc_execution = NCExecutionState(nc_execution_raw) + else: + meta.nc_execution = None + + nc_calls_raw = data.get('nc_calls') + if nc_calls_raw is not None: + meta.nc_calls = [MetaNCCallRecord.from_json(x) for x in nc_calls_raw] + else: + meta.nc_calls = None + return meta @classmethod diff --git a/hathor/transaction/types.py b/hathor/transaction/types.py new file mode 100644 index 000000000..34a59e9b1 --- /dev/null +++ b/hathor/transaction/types.py @@ -0,0 +1,61 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import annotations + +from dataclasses import dataclass +from typing import TYPE_CHECKING, Any, Self + +if TYPE_CHECKING: + from hathor.nanocontracts.runner.types import CallRecord, NCIndexUpdateRecord + + +@dataclass(slots=True, frozen=True, kw_only=True) +class MetaNCCallRecord: + """Dataclass to hold NC call information in transaction metadata.""" + blueprint_id: bytes + contract_id: bytes + method_name: str + index_updates: list[NCIndexUpdateRecord] + + def to_json(self) -> dict[str, Any]: + """Convert this record to a json dict.""" + return dict( + blueprint_id=self.blueprint_id.hex(), + contract_id=self.contract_id.hex(), + method_name=self.method_name, + index_updates=[syscall.to_json() for syscall in self.index_updates] + ) + + @classmethod + def from_json(cls, json_dict: dict[str, Any]) -> Self: + """Create an instance from a json dict.""" + from hathor.nanocontracts.runner.types import nc_index_update_record_from_json + return cls( + blueprint_id=bytes.fromhex(json_dict['blueprint_id']), + contract_id=bytes.fromhex(json_dict['contract_id']), + method_name=json_dict['method_name'], + index_updates=[nc_index_update_record_from_json(syscall) for syscall in json_dict['index_updates']] + ) + + @classmethod + def from_call_record(cls, call_record: CallRecord) -> Self: + """Create an instance from a CallRecord.""" + assert call_record.index_updates is not None + return cls( + blueprint_id=call_record.blueprint_id, + contract_id=call_record.contract_id, + method_name=call_record.method_name, + index_updates=call_record.index_updates, + ) diff --git a/hathor/transaction/util.py b/hathor/transaction/util.py index d1bec3832..5239f8ac3 100644 --- a/hathor/transaction/util.py +++ b/hathor/transaction/util.py @@ -17,8 +17,11 @@ import re import struct from math import ceil, floor +from struct import error as StructError from typing import TYPE_CHECKING, Any, Callable, Optional +from hathor.transaction.exceptions import InvalidOutputValue, 
TransactionDataError + if TYPE_CHECKING: from hathor.conf.settings import HathorSettings @@ -42,13 +45,14 @@ def bytes_to_int(data: bytes, *, signed: bool = False) -> int: return int.from_bytes(data, byteorder='big', signed=signed) -def unpack(fmt: str, buf: bytes) -> Any: +def unpack(fmt: str, buf: bytes | memoryview) -> tuple[Any, bytes | memoryview]: size = struct.calcsize(fmt) return struct.unpack(fmt, buf[:size]), buf[size:] -def unpack_len(n: int, buf: bytes) -> tuple[bytes, bytes]: - return buf[:n], buf[n:] +def unpack_len(n: int, buf: bytes | memoryview) -> tuple[bytes, bytes | memoryview]: + ret = buf[:n] if isinstance(buf, bytes) else bytes(buf[:n]) + return ret, buf[n:] def get_deposit_amount(settings: HathorSettings, mint_amount: int) -> int: @@ -64,3 +68,52 @@ def clean_token_string(string: str) -> str: It sets to uppercase, removes double spaces and spaces at the beginning and end. """ return re.sub(r'\s\s+', ' ', string).strip().upper() + + +def decode_string_utf8(encoded: bytes, key: str) -> str: + """ Raises StructError in case it's not a valid utf-8 string + """ + try: + decoded = encoded.decode('utf-8') + return decoded + except UnicodeDecodeError: + raise StructError('{} must be a valid utf-8 string.'.format(key)) + + +def bytes_to_output_value(data: bytes) -> tuple[int, bytes]: + from hathor.serialization import BadDataError, Deserializer + from hathor.serialization.encoding.output_value import decode_output_value + deserializer = Deserializer.build_bytes_deserializer(data) + try: + output_value = decode_output_value(deserializer) + except BadDataError as e: + raise InvalidOutputValue(*e.args) + remaining_data = deserializer.read_all() + return (output_value, remaining_data) + + +def output_value_to_bytes(number: int) -> bytes: + from hathor.serialization import Serializer + from hathor.serialization.encoding.output_value import encode_output_value + serializer = Serializer.build_bytes_serializer() + try: + encode_output_value(serializer, 
number) + except ValueError as e: + raise InvalidOutputValue(*e.args) + return bytes(serializer.finalize()) + + +def validate_token_name_and_symbol(settings: HathorSettings, token_name: str, token_symbol: str) -> None: + """Validate token_name and token_symbol before creating a new token.""" + name_len = len(token_name) + symbol_len = len(token_symbol) + if name_len == 0 or name_len > settings.MAX_LENGTH_TOKEN_NAME: + raise TransactionDataError('Invalid token name length ({})'.format(name_len)) + if symbol_len == 0 or symbol_len > settings.MAX_LENGTH_TOKEN_SYMBOL: + raise TransactionDataError('Invalid token symbol length ({})'.format(symbol_len)) + + # Can't create token with hathor name or symbol + if clean_token_string(token_name) == clean_token_string(settings.HATHOR_TOKEN_NAME): + raise TransactionDataError('Invalid token name ({})'.format(token_name)) + if clean_token_string(token_symbol) == clean_token_string(settings.HATHOR_TOKEN_SYMBOL): + raise TransactionDataError('Invalid token symbol ({})'.format(token_symbol)) diff --git a/hathor/transaction/vertex_parser.py b/hathor/transaction/vertex_parser.py index 03979123f..d09e3887f 100644 --- a/hathor/transaction/vertex_parser.py +++ b/hathor/transaction/vertex_parser.py @@ -15,11 +15,12 @@ from __future__ import annotations from struct import error as StructError -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Type -from hathor.conf.settings import HathorSettings +from hathor.transaction.headers import NanoHeader, VertexBaseHeader, VertexHeaderId if TYPE_CHECKING: + from hathor.conf.settings import HathorSettings from hathor.transaction import BaseTransaction from hathor.transaction.storage import TransactionStorage @@ -30,6 +31,23 @@ class VertexParser: def __init__(self, *, settings: HathorSettings) -> None: self._settings = settings + @staticmethod + def get_supported_headers(settings: HathorSettings) -> dict[VertexHeaderId, Type[VertexBaseHeader]]: + """Return a dict of supported 
headers.""" + supported_headers: dict[VertexHeaderId, Type[VertexBaseHeader]] = {} + if settings.ENABLE_NANO_CONTRACTS: + supported_headers[VertexHeaderId.NANO_HEADER] = NanoHeader + return supported_headers + + @staticmethod + def get_header_parser(header_id_bytes: bytes, settings: HathorSettings) -> Type[VertexBaseHeader]: + """Get the parser for a given header type.""" + header_id = VertexHeaderId(header_id_bytes) + supported_headers = VertexParser.get_supported_headers(settings) + if header_id not in supported_headers: + raise ValueError(f'Header type not supported: {header_id_bytes!r}') + return supported_headers[header_id] + def deserialize(self, data: bytes, storage: TransactionStorage | None = None) -> BaseTransaction: """ Creates the correct tx subclass from a sequence of bytes """ @@ -38,9 +56,15 @@ def deserialize(self, data: bytes, storage: TransactionStorage | None = None) -> version = data[1] try: tx_version = TxVersion(version) - if not self._settings.CONSENSUS_ALGORITHM.is_vertex_version_valid(tx_version, include_genesis=True): + is_valid = self._settings.CONSENSUS_ALGORITHM.is_vertex_version_valid( + tx_version, + include_genesis=True, + settings=self._settings, + ) + + if not is_valid: raise StructError(f"invalid vertex version: {tx_version}") cls = tx_version.get_cls() return cls.create_from_struct(data, storage=storage) - except ValueError: - raise StructError('Invalid bytes to create transaction subclass.') + except ValueError as e: + raise StructError('Invalid bytes to create transaction subclass.') from e diff --git a/hathor/types.py b/hathor/types.py index 7dfa808aa..d264b93af 100644 --- a/hathor/types.py +++ b/hathor/types.py @@ -14,16 +14,17 @@ from typing import TypeAlias -# XXX There is a lot of refactor to be done before we can use `NewType`. -# So, let's skip using NewType until everything is refactored. 
+# XXX: All of these types already have an equivalent NewType available on `hathor.nanoconracts.types`, the next step is +# to refactor the places which use `hathor.types`, which is still a lot. Some of these would also benefit from +# using custom classes like `Hash` for better str/repr. -VertexId: TypeAlias = bytes # NewType('TxId', bytes) Address: TypeAlias = bytes # NewType('Address', bytes) AddressB58: TypeAlias = str -TxOutputScript: TypeAlias = bytes # NewType('TxOutputScript', bytes) +Amount: TypeAlias = int # NewType('Amount', int) Timestamp: TypeAlias = int # NewType('Timestamp', int) +TxOutputScript: TypeAlias = bytes # NewType('TxOutputScript', bytes) +VertexId: TypeAlias = bytes # NewType('VertexId', bytes) TokenUid: TypeAlias = VertexId # NewType('TokenUid', VertexId) -Amount: TypeAlias = int # NewType('Amount', int) class Hash: diff --git a/hathor/util.py b/hathor/util.py index 755a1d381..0a87d371c 100644 --- a/hathor/util.py +++ b/hathor/util.py @@ -12,13 +12,14 @@ # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations + import datetime import gc import json import math import sys import time -import warnings from collections import OrderedDict from contextlib import AbstractContextManager from dataclasses import asdict, dataclass @@ -36,6 +37,7 @@ import structlog from hathor.transaction.base_transaction import BaseTransaction + from hathor.wallet import HDWallet logger = get_logger() @@ -67,23 +69,6 @@ def practically_equal(a: dict[Any, Any], b: dict[Any, Any]) -> bool: return True -def deprecated(msg: str) -> Callable[..., Any]: - """Use to indicate that a function or method has been deprecated.""" - warnings.simplefilter('default', DeprecationWarning) - - def decorator(func: Callable[..., Any]) -> Callable[..., Any]: - @wraps(func) - def wrapper(*args: Any, **kwargs: Any) -> Any: - # warnings.warn('{} is deprecated. 
{}'.format(func.__name__, msg), - # category=DeprecationWarning, stacklevel=2) - return func(*args, **kwargs) - - wrapper.__deprecated = func # type: ignore - return wrapper - - return decorator - - def skip_warning(func: Callable[..., Any]) -> Callable[..., Any]: f = cast(Callable[..., Any], getattr(func, '__deprecated', func)) if hasattr(func, '__self__') and not hasattr(f, '__self__'): @@ -377,6 +362,37 @@ def skip_n(it: Iterator[_T], n: int) -> Iterator[_T]: return it +def skip_until(it: Iterator[_T], condition: Callable[[_T], bool]) -> Iterator[_T]: + """ Skip all elements and stops after condition is True, it will also skip the element where condition is True. + + Example: + + >>> list(skip_until(iter(range(10)), lambda x: x == 0)) + [1, 2, 3, 4, 5, 6, 7, 8, 9] + + >>> list(skip_until(iter(range(10)), lambda x: x > 0)) + [2, 3, 4, 5, 6, 7, 8, 9] + + >>> list(skip_until(iter(range(10)), lambda x: x == 8)) + [9] + + >>> list(skip_until(iter(range(10)), lambda x: x == 9)) + [] + + >>> list(skip_until(iter(range(10)), lambda x: x == 10)) + [] + """ + while True: + try: + i = next(it) + except StopIteration: + return it + else: + if condition(i): + break + return it + + _DT_ITER_NEXT_WARN = 3 # time in seconds to warn when `next(iter_tx)` takes too long _DT_LOG_PROGRESS = 30 # time in seconds after which a progress will be logged (it can take longer, but not shorter) _DT_YIELD_WARN = 1 # time in seconds to warn when `yield tx` takes too long (which is when processing happens) @@ -823,3 +839,19 @@ def bytes_to_vertexid(data: bytes) -> VertexId: if len(data) != 32: raise ValueError('length must be exactly 32 bytes') return VertexId(data) + + +def bytes_from_hex(hex_str: str) -> bytes | None: + """Convert a hex string to bytes or return None if it's invalid.""" + try: + return bytes.fromhex(hex_str) + except ValueError: + return None + + +def initialize_hd_wallet(words: str) -> HDWallet: + """Get an initialized HDWallet from the provided words.""" + from 
hathor.wallet import HDWallet + hd = HDWallet(words=words) + hd._manually_initialize() + return hd diff --git a/hathor/utils/api.py b/hathor/utils/api.py index 52728c67a..a074f4b58 100644 --- a/hathor/utils/api.py +++ b/hathor/utils/api.py @@ -15,7 +15,7 @@ import cgi from typing import Type, TypeVar, Union -from pydantic import Field, ValidationError, validator +from pydantic import Field, ValidationError from twisted.web.http import Request from hathor.api_util import get_args @@ -31,7 +31,6 @@ class QueryParams(BaseModel): Subclass this class defining your query parameters as attributes and their respective types, then call the from_request() class method to instantiate your class from the provided request. """ - _list_to_single_item_validator = validator('*', pre=True, allow_reuse=True)(single_or_none) @classmethod def from_request(cls: Type[T], request: Request) -> Union[T, 'ErrorResponse']: @@ -43,10 +42,17 @@ def from_request(cls: Type[T], request: Request) -> Union[T, 'ErrorResponse']: encoding = options.get('charset', encoding) raw_args = get_args(request).items() - args = { - key.decode(encoding): [value.decode(encoding) for value in values] - for key, values in raw_args - } + args: dict[str, str | None | list[str]] = {} + for key, values in raw_args: + decoded_key = key.decode(encoding) + decoded_values: list[str] = [value.decode(encoding) for value in values] + if not decoded_key.endswith('[]'): + try: + args[decoded_key] = single_or_none(decoded_values) + except Exception as error: + return ErrorResponse(error=str(error)) + else: + args[decoded_key] = decoded_values try: return cls.parse_obj(args) diff --git a/hathor/utils/leb128.py b/hathor/utils/leb128.py new file mode 100644 index 000000000..fb442f0d0 --- /dev/null +++ b/hathor/utils/leb128.py @@ -0,0 +1,117 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from hathor.serialization import Deserializer, SerializationError, Serializer +from hathor.serialization.adapters import MaxBytesExceededError +from hathor.serialization.encoding.leb128 import decode_leb128, encode_leb128 + + +def encode_signed(value: int, *, max_bytes: int | None = None) -> bytes: + """ + Receive a signed integer and return its LEB128-encoded bytes. + + >>> encode_signed(0) == bytes([0x00]) + True + >>> encode_signed(624485) == bytes([0xE5, 0x8E, 0x26]) + True + >>> encode_signed(-123456) == bytes([0xC0, 0xBB, 0x78]) + True + """ + serializer: Serializer = Serializer.build_bytes_serializer() + try: + encode_leb128(serializer.with_optional_max_bytes(max_bytes), value, signed=True) + except MaxBytesExceededError as e: + raise ValueError(f'cannot encode more than {max_bytes} bytes') from e + except SerializationError as e: + raise ValueError('serialization error') from e + return bytes(serializer.finalize()) + + +def encode_unsigned(value: int, *, max_bytes: int | None = None) -> bytes: + """ + Receive an unsigned integer and return its LEB128-encoded bytes. 
+ + >>> encode_unsigned(0) == bytes([0x00]) + True + >>> encode_unsigned(624485) == bytes([0xE5, 0x8E, 0x26]) + True + """ + serializer: Serializer = Serializer.build_bytes_serializer() + try: + encode_leb128(serializer.with_optional_max_bytes(max_bytes), value, signed=False) + except MaxBytesExceededError as e: + raise ValueError(f'cannot encode more than {max_bytes} bytes') from e + except SerializationError as e: + raise ValueError('serialization error') from e + return bytes(serializer.finalize()) + + +def decode_signed(data: bytes, *, max_bytes: int | None = None) -> tuple[int, bytes]: + """ + Receive and consume a buffer returning a tuple of the unpacked + LEB128-encoded signed integer and the remaining buffer. + + >>> decode_signed(bytes([0x00]) + b'test') + (0, b'test') + >>> decode_signed(bytes([0xE5, 0x8E, 0x26]) + b'test') + (624485, b'test') + >>> decode_signed(bytes([0xC0, 0xBB, 0x78]) + b'test') + (-123456, b'test') + >>> decode_signed(bytes([0xC0, 0xBB, 0x78]) + b'test', max_bytes=3) + (-123456, b'test') + >>> try: + ... decode_signed(bytes([0xC0, 0xBB, 0x78]) + b'test', max_bytes=2) + ... except ValueError as e: + ... print(e) + cannot decode more than 2 bytes + """ + deserializer = Deserializer.build_bytes_deserializer(data) + try: + value = decode_leb128(deserializer.with_optional_max_bytes(max_bytes), signed=True) + except MaxBytesExceededError as e: + raise ValueError(f'cannot decode more than {max_bytes} bytes') from e + except SerializationError as e: + raise ValueError('deserialization error') from e + remaining_data = bytes(deserializer.read_all()) + deserializer.finalize() + return (value, remaining_data) + + +def decode_unsigned(data: bytes, *, max_bytes: int | None = None) -> tuple[int, bytes]: + """ + Receive and consume a buffer returning a tuple of the unpacked + LEB128-encoded unsigned integer and the remaining buffer. 
+ + >>> decode_unsigned(bytes([0x00]) + b'test') + (0, b'test') + >>> decode_unsigned(bytes([0xE5, 0x8E, 0x26]) + b'test') + (624485, b'test') + >>> decode_unsigned(bytes([0xE5, 0x8E, 0x26]) + b'test', max_bytes=3) + (624485, b'test') + >>> try: + ... decode_unsigned(bytes([0xE5, 0x8E, 0x26]) + b'test', max_bytes=2) + ... except ValueError as e: + ... print(e) + cannot decode more than 2 bytes + """ + deserializer = Deserializer.build_bytes_deserializer(data) + try: + value = decode_leb128(deserializer.with_optional_max_bytes(max_bytes), signed=False) + except MaxBytesExceededError as e: + raise ValueError(f'cannot decode more than {max_bytes} bytes') from e + except SerializationError as e: + raise ValueError('deserialization error') from e + remaining_data = bytes(deserializer.read_all()) + deserializer.finalize() + return (value, remaining_data) diff --git a/hathor/utils/typing.py b/hathor/utils/typing.py new file mode 100644 index 000000000..1e7d2a6e5 --- /dev/null +++ b/hathor/utils/typing.py @@ -0,0 +1,219 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import annotations + +from types import UnionType +from typing import Generic, TypeVar, get_args as _typing_get_args, get_origin as _typing_get_origin +from weakref import WeakValueDictionary + +from typing_extensions import Self + +T = TypeVar('T') + + +def get_origin(t: type | UnionType, /) -> type | None: + """Extension of typing.get_origin to also work with classes that use InnerTypeMixin""" + if isinstance(t, type) and issubclass(t, InnerTypeMixin): + return getattr(t, '__origin__', None) + return _typing_get_origin(t) + + +def get_args(t: type | UnionType, /) -> tuple[type, ...] | None: + """Extension of typing.get_args to also work with classes that use InnerTypeMixin""" + if isinstance(t, type) and issubclass(t, InnerTypeMixin): + return getattr(t, '__args__', None) + return _typing_get_args(t) + + +class InnerTypeMixin(Generic[T]): + """ + Mixin class that exposes its single type‐argument at runtime as `self.__inner_type__`, + enforces exactly one type argument at subscription time, caches parameterized subclasses + so C[int] is C[int], and provides a clean repr listing public fields. + + >>> from typing import TypeVar + >>> U = TypeVar('U') + >>> class MyData(InnerTypeMixin, Generic[T]): + ... def __init__(self, data: T): + ... self.data = data + ... + >>> class MyCounter(InnerTypeMixin, Generic[T]): + ... def __init__(self, first: T, count: int): + ... self.first = first + ... self.count = count + ... + + # 1) You must supply exactly one type argument: + >>> try: + ... MyData(1) + ... except TypeError as e: + ... print(e) + MyData[...] requires exactly one type argument, got none + + >>> try: + ... MyData[int, str](1) + ... except TypeError as e: + ... print(e) + MyData[...] expects exactly one type argument; got 2 + + # You may write MyData[U] for signatures, but instantiation will reject a bare TypeVar: + >>> MyData[U] # no error + + + >>> try: + ... MyData[U]() + ... except TypeError as e: + ... print(e) + MyData[...] 
requires a concrete type argument, got ~U + + # Correct usage with a concrete type: + >>> sd = MyData[int](123) + >>> MyData[int] is MyData[int] + True + >>> sd.__inner_type__ is int + True + >>> print(sd) + MyData[int](data=123) + + # Works with multiple fields too: + >>> h = MyCounter[str]("foo", 42) + >>> h.__inner_type__ is str + True + >>> print(h) + MyCounter[str](first='foo', count=42) + """ + + # cache shared by all subclasses, maps concrete inner_type -> subclass, but doesn't keep subclasses alive if it has + # no live references anymore, this keeps the cache from growing indefinitely in case of dynamically generated + # classes, there's no point in holding unreferenced classes here + __type_cache: WeakValueDictionary[tuple[type, type], type[Self]] = WeakValueDictionary() + + # this class will expose this instance property + __inner_type__: type[T] + + @classmethod + def __extract_inner_type__(cls, args: tuple[type, ...], /) -> type[T]: + """Defines how to convert the received argument tuples into the stored type. + + If customization is needed, this class method is the place to do it. It could be used so only the origin-type is + stored, or to accept multiple arguments and store a tuple of types, or to convert the arguments into different + types. + """ + if len(args) != 1: + raise TypeError(f'{cls.__name__}[...] 
expects exactly one type argument; got {len(args)}') + inner_type, = args + return inner_type + + @classmethod + def __class_getitem__(cls, params): + # parameterizing the mixin itself delegates to Generic + if cls is InnerTypeMixin: + return super().__class_getitem__(params) + + # normalize to a 1-tuple + args = params if isinstance(params, tuple) else (params,) + inner_type = cls.__extract_inner_type__(args) + + cache = cls.__type_cache + key = (cls, inner_type) + sub = cache.get(key) + if sub is None: + # subclass keeps the same name for clean repr + sub = type(cls.__name__, (cls,), {}) + sub.__inner_type__ = inner_type + sub.__origin__ = cls + sub.__args__ = (inner_type,) + sub.__module__ = cls.__module__ + sub.__type_cache = cache + cache[key] = sub + return sub + + def __new__(cls, *args, **kwargs): + # reject unsubscripted class + if not get_args(cls): + raise TypeError(f'{cls.__name__}[...] requires exactly one type argument, got none') + + # reject if the subscribed‐in type is still a TypeVar + inner_type = getattr(cls, '__inner_type__', None) + if isinstance(inner_type, TypeVar): + raise TypeError(f'{cls.__name__}[...] requires a concrete type argument, got {inner_type!r}') + + # build instance and copy down the inner type + self = super().__new__(cls) + self.__inner_type__ = inner_type + return self + + def __repr__(self) -> str: + name = type(self).__name__ + t = self.__inner_type__ + tname = getattr(t, '__name__', repr(t)) + public = [(n, v) for n, v in vars(self).items() if not n.startswith('_')] + if public: + body = ', '.join(f'{n}={v!r}' for n, v in public) + return f'{name}[{tname}]({body})' + return f'{name}[{tname}]()' + + +def is_subclass(cls: type, class_or_tuple: type | tuple[type] | UnionType, /) -> bool: + """ Reimplements issubclass() with support for recursive NewType classes. 
+ + Normal behavior from `issubclass`: + + >>> is_subclass(int, int) + True + >>> is_subclass(bool, int) + True + >>> is_subclass(bool, (int, str)) + True + >>> is_subclass(bool, int | str) + True + >>> is_subclass(bool, bytes | str) + False + >>> is_subclass(str, int) + False + + But `is_subclass` also works when a NewType is given as arg 1: + + >>> from typing import NewType + >>> N = NewType('N', int) + >>> is_subclass(N, int) + True + >>> is_subclass(N, int | str) + True + >>> is_subclass(N, str) + False + >>> M = NewType('M', N) + >>> is_subclass(M, int) + True + >>> is_subclass(M, str) + False + >>> try: + ... is_subclass(M, N) + ... except TypeError as e: + ... print(*e.args) + issubclass() arg 2 must be a class, a tuple of classes, or a union + + It is also expected to fail in the same way as `issubclass` when resolving the NewType doesn't lead to a class: + + >>> F = NewType('F', 'not a class') + >>> try: + ... is_subclass(F, str) + ... except TypeError as e: + ... print(*e.args) + issubclass() arg 1 must be a class + """ + while (super_type := getattr(cls, '__supertype__', None)) is not None: + cls = super_type + return issubclass(cls, class_or_tuple) diff --git a/hathor/verification/merge_mined_block_verifier.py b/hathor/verification/merge_mined_block_verifier.py index 307604104..55b7c9e02 100644 --- a/hathor/verification/merge_mined_block_verifier.py +++ b/hathor/verification/merge_mined_block_verifier.py @@ -39,4 +39,4 @@ def verify_aux_pow(self, block: MergeMinedBlock) -> None: else self._settings.OLD_MAX_MERKLE_PATH_LENGTH ) - block.aux_pow.verify(block.get_base_hash(), max_merkle_path_length) + block.aux_pow.verify(block.get_mining_base_hash(), max_merkle_path_length) diff --git a/hathor/verification/nano_header_verifier.py b/hathor/verification/nano_header_verifier.py new file mode 100644 index 000000000..cfc1dd35e --- /dev/null +++ b/hathor/verification/nano_header_verifier.py @@ -0,0 +1,117 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under 
the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +from collections import defaultdict +from typing import Sequence + +from hathor.conf.settings import HATHOR_TOKEN_UID, HathorSettings +from hathor.nanocontracts.exception import NCInvalidAction, NCInvalidSignature +from hathor.nanocontracts.types import BaseAuthorityAction, NCAction, NCActionType, TokenUid +from hathor.transaction import BaseTransaction, Transaction +from hathor.transaction.exceptions import ScriptError, TooManySigOps +from hathor.transaction.headers.nano_header import ADDRESS_LEN_BYTES +from hathor.transaction.scripts import SigopCounter, create_output_script +from hathor.transaction.scripts.execute import ScriptExtras, raw_script_eval + +MAX_NC_SCRIPT_SIZE: int = 1024 +MAX_NC_SCRIPT_SIGOPS_COUNT: int = 20 +MAX_ACTIONS_LEN: int = 16 +ALLOWED_ACTION_SETS: frozenset[frozenset[NCActionType]] = frozenset([ + frozenset(), + frozenset([NCActionType.DEPOSIT]), + frozenset([NCActionType.WITHDRAWAL]), + frozenset([NCActionType.GRANT_AUTHORITY]), + frozenset([NCActionType.ACQUIRE_AUTHORITY]), + frozenset([NCActionType.DEPOSIT, NCActionType.GRANT_AUTHORITY]), + frozenset([NCActionType.DEPOSIT, NCActionType.ACQUIRE_AUTHORITY]), + frozenset([NCActionType.WITHDRAWAL, NCActionType.GRANT_AUTHORITY]), + frozenset([NCActionType.WITHDRAWAL, NCActionType.ACQUIRE_AUTHORITY]), +]) + + +class NanoHeaderVerifier: + __slots__ = ('_settings',) + + def __init__(self, *, settings: HathorSettings) -> None: + 
self._settings = settings + + def verify_nc_signature(self, tx: BaseTransaction) -> None: + """Verify if the caller's signature is valid.""" + assert tx.is_nano_contract() + assert isinstance(tx, Transaction) + + nano_header = tx.get_nano_header() + if len(nano_header.nc_address) != ADDRESS_LEN_BYTES: + raise NCInvalidSignature(f'invalid address: {nano_header.nc_address.hex()}') + + if len(nano_header.nc_script) > MAX_NC_SCRIPT_SIZE: + raise NCInvalidSignature( + f'nc_script larger than max: {len(nano_header.nc_script)} > {MAX_NC_SCRIPT_SIZE}' + ) + + counter = SigopCounter( + max_multisig_pubkeys=self._settings.MAX_MULTISIG_PUBKEYS, + enable_checkdatasig_count=True, + ) + output_script = create_output_script(nano_header.nc_address) + sigops_count = counter.get_sigops_count(nano_header.nc_script, output_script) + if sigops_count > MAX_NC_SCRIPT_SIGOPS_COUNT: + raise TooManySigOps(f'sigops count greater than max: {sigops_count} > {MAX_NC_SCRIPT_SIGOPS_COUNT}') + + try: + raw_script_eval( + input_data=nano_header.nc_script, + output_script=output_script, + extras=ScriptExtras(tx=tx) + ) + except ScriptError as e: + raise NCInvalidSignature from e + + @staticmethod + def verify_actions(tx: BaseTransaction) -> None: + """Verify nc_actions.""" + assert tx.is_nano_contract() + assert isinstance(tx, Transaction) + + tx_tokens_set = set(tx.tokens) + nano_header = tx.get_nano_header() + actions = nano_header.get_actions() + NanoHeaderVerifier.verify_action_list(actions) + + for action in actions: + if isinstance(action, BaseAuthorityAction): + # This is verified in model creation, so we just assert here. 
+ assert action.token_uid != HATHOR_TOKEN_UID + + if action.token_uid != HATHOR_TOKEN_UID and action.token_uid not in tx_tokens_set: + raise NCInvalidAction( + f'{action.name} action requires token {action.token_uid.hex()} in tokens list' + ) + + @staticmethod + def verify_action_list(actions: Sequence[NCAction]) -> None: + """Perform NCAction verifications that do not depend on the tx.""" + if len(actions) > MAX_ACTIONS_LEN: + raise NCInvalidAction(f'more actions than the max allowed: {len(actions)} > {MAX_ACTIONS_LEN}') + + actions_map: defaultdict[TokenUid, list[NCAction]] = defaultdict(list) + for action in actions: + actions_map[action.token_uid].append(action) + + for token_uid, actions_per_token in actions_map.items(): + action_types = {action.type for action in actions_per_token} + if action_types not in ALLOWED_ACTION_SETS: + raise NCInvalidAction(f'conflicting actions for token {token_uid.hex()}') diff --git a/hathor/verification/on_chain_blueprint_verifier.py b/hathor/verification/on_chain_blueprint_verifier.py new file mode 100644 index 000000000..bbc3e8b7b --- /dev/null +++ b/hathor/verification/on_chain_blueprint_verifier.py @@ -0,0 +1,210 @@ +# Copyright 2024 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import ast + +from cryptography.exceptions import InvalidSignature +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric import ec + +from hathor.conf.settings import HathorSettings +from hathor.crypto.util import get_address_b58_from_public_key_bytes, get_public_key_from_bytes_compressed +from hathor.nanocontracts import OnChainBlueprint +from hathor.nanocontracts.exception import NCInvalidPubKey, NCInvalidSignature, OCBInvalidScript, OCBPubKeyNotAllowed +from hathor.nanocontracts.on_chain_blueprint import ( + ALLOWED_IMPORTS, + AST_NAME_BLACKLIST, + BLUEPRINT_CLASS_NAME, + PYTHON_CODE_COMPAT_VERSION, +) + + +class _RestrictionsVisitor(ast.NodeVisitor): + def visit_Interactive(self, node: ast.Interactive) -> None: + raise AssertionError('mode="single" must not be used for parsing') + + def visit_Expression(self, node: ast.Expression) -> None: + raise AssertionError('mode="eval" must not be used for parsing') + + def visit_FunctionType(self, node: ast.Expression) -> None: + raise AssertionError('mode="func_type" must not be used for parsing') + + def visit_Import(self, node: ast.Import) -> None: + raise SyntaxError('Import statements are not allowed.') + + def visit_ImportFrom(self, node: ast.ImportFrom) -> None: + if node.module not in ALLOWED_IMPORTS: + raise SyntaxError(f'Importing from "{node.module}" is not allowed.') + allowed_fromlist = ALLOWED_IMPORTS[node.module] + for import_what in node.names: + if import_what.name not in allowed_fromlist: + raise SyntaxError(f'Importing "{import_what.name}" from "{node.module}" is not allowed.') + + def visit_Try(self, node: ast.Try) -> None: + raise SyntaxError('Try/Except blocks are not allowed.') + + def visit_Name(self, node: ast.Name) -> None: + if node.id in AST_NAME_BLACKLIST: + raise SyntaxError(f'Usage or reference to {node.id} is not allowed.') + self.generic_visit(node) + + def visit_Attribute(self, node: ast.Attribute) -> None: + if isinstance(node.value, 
ast.Name): + if '__' in node.attr: + raise SyntaxError('Access to internal attributes and methods is not allowed.') + self.generic_visit(node) + + def visit_AsyncFunctionDef(self, node: ast.AsyncFunctionDef) -> None: + raise SyntaxError('Async functions are not allowed.') + + def visit_Await(self, node: ast.Await) -> None: + raise SyntaxError('Await is not allowed.') + + def visit_AsyncFor(self, node: ast.AsyncFor) -> None: + raise SyntaxError('Async loops are not allowed.') + + def visit_AsyncWith(self, node: ast.AsyncWith) -> None: + raise SyntaxError('Async contexts are not allowed.') + + +class _SearchName(ast.NodeVisitor): + def __init__(self, name: str) -> None: + self.search_name = name + self.found = False + + def visit_Name(self, node: ast.Name) -> None: + if node.id == self.search_name: + self.found = True + return + self.generic_visit(node) + + +class OnChainBlueprintVerifier: + __slots__ = ('_settings',) + + def __init__(self, *, settings: HathorSettings): + self._settings = settings + + def verify_pubkey_is_allowed(self, tx: OnChainBlueprint) -> None: + """Verify if the on-chain blueprint's pubkey is allowed.""" + if self._settings.NC_ON_CHAIN_BLUEPRINT_RESTRICTED: + address = get_address_b58_from_public_key_bytes(tx.nc_pubkey) + if address not in self._settings.NC_ON_CHAIN_BLUEPRINT_ALLOWED_ADDRESSES: + raise OCBPubKeyNotAllowed(f'nc_pubkey with address {address} is not allowed') + + def verify_nc_signature(self, tx: OnChainBlueprint) -> None: + """Verify if the creator's signature is valid.""" + data = tx.get_sighash_all_data() + + try: + pubkey = get_public_key_from_bytes_compressed(tx.nc_pubkey) + except ValueError as e: + # pubkey is not compressed public key + raise NCInvalidPubKey('nc_pubkey is not a public key') from e + + try: + pubkey.verify(tx.nc_signature, data, ec.ECDSA(hashes.SHA256())) + except InvalidSignature as e: + raise NCInvalidSignature from e + + def _get_python_code_ast(self, tx: OnChainBlueprint) -> ast.Module: + from 
hathor.nanocontracts.on_chain_blueprint import CodeKind + assert tx.code.kind is CodeKind.PYTHON_ZLIB, 'only Python+Gzip is supported' + if tx._ast_cache is not None: + return tx._ast_cache + # XXX: feature_version is a best-effort compatibility, some subtle cases could break, which is important to + # deal with, so this isn't a definitive solution + # XXX: consider this: + # Signature: + # ast.parse( + # source, + # filename='', + # mode='exec', + # *, + # type_comments=False, + # feature_version=None, + # ) + # Source: + # def parse(source, filename='', mode='exec', *, + # type_comments=False, feature_version=None): + # """ + # Parse the source into an AST node. + # Equivalent to compile(source, filename, mode, PyCF_ONLY_AST). + # Pass type_comments=True to get back type comments where the syntax allows. + # """ + # flags = PyCF_ONLY_AST + # if type_comments: + # flags |= PyCF_TYPE_COMMENTS + # if feature_version is None: + # feature_version = -1 + # elif isinstance(feature_version, tuple): + # major, minor = feature_version # Should be a 2-tuple. + # if major != 3: + # raise ValueError(f"Unsupported major version: {major}") + # feature_version = minor + # # Else it should be an int giving the minor version for 3.x. 
+ # return compile(source, filename, mode, flags, + # _feature_version=feature_version) + # XXX: in practice we want to use ast.parse, but we need specify `dont_inherit=True` to prevent the current + # module's `from __future__ ...` imports from affecting the compilation, `_feature_version` is a private + # argument, so we have to be mindful of this whenever there's an update to Python's version + parsed_tree = compile( + source=tx.code.text, + filename=f'<{tx.hash.hex()}.code>', + mode='exec', + flags=ast.PyCF_ONLY_AST, + dont_inherit=True, + optimize=0, + _feature_version=PYTHON_CODE_COMPAT_VERSION[1], + ) + assert isinstance(parsed_tree, ast.Module) + tx._ast_cache = parsed_tree + return parsed_tree + + def verify_code(self, tx: OnChainBlueprint) -> None: + """Run all verification related to the blueprint code.""" + self._verify_python_script(tx) + self._verify_script_restrictions(tx) + self._verify_has_blueprint_attr(tx) + self._verify_blueprint_type(tx) + + def _verify_python_script(self, tx: OnChainBlueprint) -> None: + """Verify that the script can be parsed at all.""" + try: + self._get_python_code_ast(tx) + except SyntaxError as e: + raise OCBInvalidScript('Could not correctly parse the script') from e + + def _verify_script_restrictions(self, tx: OnChainBlueprint) -> None: + """Verify that the script does not use any forbidden syntax.""" + try: + _RestrictionsVisitor().visit(self._get_python_code_ast(tx)) + except SyntaxError as e: + raise OCBInvalidScript('forbidden syntax') from e + + def _verify_has_blueprint_attr(self, tx: OnChainBlueprint) -> None: + """Verify that the script defines a __blueprint__ attribute.""" + search_name = _SearchName(BLUEPRINT_CLASS_NAME) + search_name.visit(self._get_python_code_ast(tx)) + if not search_name.found: + raise OCBInvalidScript(f'Could not find {BLUEPRINT_CLASS_NAME} object') + + def _verify_blueprint_type(self, tx: OnChainBlueprint) -> None: + """Verify that the __blueprint__ is a Blueprint, this will load and 
execute the blueprint code.""" + from hathor.nanocontracts.blueprint import Blueprint + blueprint_class = tx.get_blueprint_object_bypass() + if not isinstance(blueprint_class, type): + raise OCBInvalidScript(f'{BLUEPRINT_CLASS_NAME} is not a class') + if not issubclass(blueprint_class, Blueprint): + raise OCBInvalidScript(f'{BLUEPRINT_CLASS_NAME} is not a Blueprint subclass') diff --git a/hathor/verification/token_creation_transaction_verifier.py b/hathor/verification/token_creation_transaction_verifier.py index 4d0ac543c..db1d9c23f 100644 --- a/hathor/verification/token_creation_transaction_verifier.py +++ b/hathor/verification/token_creation_transaction_verifier.py @@ -13,10 +13,10 @@ # limitations under the License. from hathor.conf.settings import HathorSettings -from hathor.transaction.exceptions import InvalidToken, TransactionDataError +from hathor.transaction.exceptions import InvalidToken from hathor.transaction.token_creation_tx import TokenCreationTransaction from hathor.transaction.transaction import TokenInfo -from hathor.transaction.util import clean_token_string +from hathor.transaction.util import validate_token_name_and_symbol from hathor.types import TokenUid @@ -42,15 +42,4 @@ def verify_minted_tokens(self, tx: TokenCreationTransaction, token_dict: dict[To def verify_token_info(self, tx: TokenCreationTransaction) -> None: """ Validates token info """ - name_len = len(tx.token_name) - symbol_len = len(tx.token_symbol) - if name_len == 0 or name_len > self._settings.MAX_LENGTH_TOKEN_NAME: - raise TransactionDataError('Invalid token name length ({})'.format(name_len)) - if symbol_len == 0 or symbol_len > self._settings.MAX_LENGTH_TOKEN_SYMBOL: - raise TransactionDataError('Invalid token symbol length ({})'.format(symbol_len)) - - # Can't create token with hathor name or symbol - if clean_token_string(tx.token_name) == clean_token_string(self._settings.HATHOR_TOKEN_NAME): - raise TransactionDataError('Invalid token name ({})'.format(tx.token_name)) - 
if clean_token_string(tx.token_symbol) == clean_token_string(self._settings.HATHOR_TOKEN_SYMBOL): - raise TransactionDataError('Invalid token symbol ({})'.format(tx.token_symbol)) + validate_token_name_and_symbol(self._settings, tx.token_name, tx.token_symbol) diff --git a/hathor/verification/transaction_verifier.py b/hathor/verification/transaction_verifier.py index 906df38c2..3a961a6ac 100644 --- a/hathor/verification/transaction_verifier.py +++ b/hathor/verification/transaction_verifier.py @@ -12,12 +12,17 @@ # See the License for the specific language governing permissions and # limitations under the License. -from hathor.conf.settings import HathorSettings +from __future__ import annotations + +from typing import TYPE_CHECKING, assert_never + from hathor.daa import DifficultyAdjustmentAlgorithm +from hathor.feature_activation.feature import Feature +from hathor.feature_activation.feature_service import FeatureService from hathor.profiler import get_cpu_profiler from hathor.reward_lock import get_spent_reward_locked_info from hathor.reward_lock.reward_lock import get_minimum_best_height -from hathor.transaction import BaseTransaction, Transaction, TxInput +from hathor.transaction import BaseTransaction, Transaction, TxInput, TxVersion from hathor.transaction.exceptions import ( ConflictingInputs, DuplicatedParents, @@ -27,10 +32,11 @@ InvalidInputData, InvalidInputDataSize, InvalidToken, - NoInputError, + InvalidVersionError, RewardLocked, ScriptError, TimestampError, + TooFewInputs, TooManyInputs, TooManySigOps, WeightError, @@ -39,15 +45,25 @@ from hathor.transaction.util import get_deposit_amount, get_withdraw_amount from hathor.types import TokenUid, VertexId +if TYPE_CHECKING: + from hathor.conf.settings import HathorSettings + cpu = get_cpu_profiler() class TransactionVerifier: - __slots__ = ('_settings', '_daa') + __slots__ = ('_settings', '_daa', '_feature_service') - def __init__(self, *, settings: HathorSettings, daa: DifficultyAdjustmentAlgorithm) -> 
None: + def __init__( + self, + *, + settings: HathorSettings, + daa: DifficultyAdjustmentAlgorithm, + feature_service: FeatureService, + ) -> None: self._settings = settings self._daa = daa + self._feature_service = feature_service def verify_parents_basic(self, tx: Transaction) -> None: """Verify number and non-duplicity of parents.""" @@ -73,11 +89,17 @@ def verify_weight(self, tx: Transaction) -> None: raise WeightError(f'Invalid new tx {tx.hash_hex}: weight ({tx.weight}) is ' f'greater than the maximum allowed ({max_tx_weight})') - def verify_sigops_input(self, tx: Transaction) -> None: + def verify_sigops_input(self, tx: Transaction, enable_checkdatasig_count: bool = True) -> None: """ Count sig operations on all inputs and verify that the total sum is below the limit """ - from hathor.transaction.scripts import get_sigops_count + from hathor.transaction.scripts import SigopCounter from hathor.transaction.storage.exceptions import TransactionDoesNotExist + + counter = SigopCounter( + max_multisig_pubkeys=self._settings.MAX_MULTISIG_PUBKEYS, + enable_checkdatasig_count=enable_checkdatasig_count, + ) + n_txops = 0 for tx_input in tx.inputs: try: @@ -87,7 +109,7 @@ def verify_sigops_input(self, tx: Transaction) -> None: if tx_input.index >= len(spent_tx.outputs): raise InexistentInput('Output spent by this input does not exist: {} index {}'.format( tx_input.tx_id.hex(), tx_input.index)) - n_txops += get_sigops_count(tx_input.data, spent_tx.outputs[tx_input.index].script) + n_txops += counter.get_sigops_count(tx_input.data, spent_tx.outputs[tx_input.index].script) if n_txops > self._settings.MAX_TX_SIGOPS_INPUT: raise TooManySigOps( @@ -193,9 +215,10 @@ def verify_number_of_inputs(self, tx: Transaction) -> None: if len(tx.inputs) > self._settings.MAX_NUM_INPUTS: raise TooManyInputs('Maximum number of inputs exceeded') - if len(tx.inputs) == 0: + minimum = tx.get_minimum_number_of_inputs() + if len(tx.inputs) < minimum: if not tx.is_genesis: - raise 
NoInputError('Transaction must have at least one input') + raise TooFewInputs(f'Transaction must have at least {minimum} input(s)') def verify_output_token_indexes(self, tx: Transaction) -> None: """Verify outputs reference an existing token uid in the tokens list @@ -248,3 +271,25 @@ def verify_sum(self, token_dict: dict[TokenUid, TokenInfo]) -> None: htr_info.amount, htr_expected_amount, )) + + def verify_version(self, tx: Transaction) -> None: + """Verify that the vertex version is valid.""" + from hathor.conf.settings import NanoContractsSetting + allowed_tx_versions = { + TxVersion.REGULAR_TRANSACTION, + TxVersion.TOKEN_CREATION_TRANSACTION, + } + + match self._settings.ENABLE_NANO_CONTRACTS: + case NanoContractsSetting.DISABLED: + pass + case NanoContractsSetting.ENABLED: + allowed_tx_versions.add(TxVersion.ON_CHAIN_BLUEPRINT) + case NanoContractsSetting.FEATURE_ACTIVATION: + if self._feature_service.is_feature_active(vertex=tx, feature=Feature.NANO_CONTRACTS): + allowed_tx_versions.add(TxVersion.ON_CHAIN_BLUEPRINT) + case _ as unreachable: + assert_never(unreachable) + + if tx.version not in allowed_tx_versions: + raise InvalidVersionError(f'invalid vertex version: {tx.version}') diff --git a/hathor/verification/verification_params.py b/hathor/verification/verification_params.py new file mode 100644 index 000000000..c9e4ac888 --- /dev/null +++ b/hathor/verification/verification_params.py @@ -0,0 +1,36 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +from dataclasses import dataclass + + +@dataclass(slots=True, frozen=True, kw_only=True) +class VerificationParams: + """Contains every parameter/setting to run a single verification.""" + + enable_checkdatasig_count: bool + reject_locked_reward: bool = True + skip_block_weight_verification: bool = False + + @classmethod + def default_for_mempool(cls) -> VerificationParams: + """This is the appropriate parameters for veriyfing mempool transactions, realtime blocks and API pushes. + + Other cases should instantiate `VerificationParams` manually with the appropriate parameter values. + """ + return cls( + enable_checkdatasig_count=True, + ) diff --git a/hathor/verification/verification_service.py b/hathor/verification/verification_service.py index e966692ec..33f0a6c99 100644 --- a/hathor/verification/verification_service.py +++ b/hathor/verification/verification_service.py @@ -15,6 +15,7 @@ from typing_extensions import assert_never from hathor.conf.settings import HathorSettings +from hathor.nanocontracts import OnChainBlueprint from hathor.profiler import get_cpu_profiler from hathor.transaction import BaseTransaction, Block, MergeMinedBlock, Transaction, TxVersion from hathor.transaction.poa import PoaBlock @@ -23,6 +24,7 @@ from hathor.transaction.transaction import TokenInfo from hathor.transaction.validation_state import ValidationState from hathor.types import TokenUid +from hathor.verification.verification_params import VerificationParams from hathor.verification.vertex_verifiers import VertexVerifiers cpu = get_cpu_profiler() @@ -42,7 +44,7 @@ def __init__( self.verifiers = verifiers self._tx_storage = tx_storage - def validate_basic(self, vertex: BaseTransaction, *, skip_block_weight_verification: bool = False) -> bool: + def validate_basic(self, vertex: BaseTransaction, params: VerificationParams) -> bool: """ 
Run basic validations (all that are possible without dependencies) and update the validation state. If no exception is raised, the ValidationState will end up as `BASIC` and return `True`. @@ -51,7 +53,7 @@ def validate_basic(self, vertex: BaseTransaction, *, skip_block_weight_verificat if vertex.get_metadata().validation.is_at_least_basic(): return True - self.verify_basic(vertex, skip_block_weight_verification=skip_block_weight_verification) + self.verify_basic(vertex, params) vertex.set_validation(ValidationState.BASIC) return True @@ -59,10 +61,9 @@ def validate_basic(self, vertex: BaseTransaction, *, skip_block_weight_verificat def validate_full( self, vertex: BaseTransaction, + params: VerificationParams, *, - skip_block_weight_verification: bool = False, sync_checkpoints: bool = False, - reject_locked_reward: bool = True, init_static_metadata: bool = True, ) -> bool: """ Run full validations (these need access to all dependencies) and update the validation state. @@ -86,53 +87,65 @@ def validate_full( # ends up being CHECKPOINT_FULL instead of FULL if not meta.validation.is_at_least_basic(): # run basic validation if we haven't already - self.verify_basic(vertex, skip_block_weight_verification=skip_block_weight_verification) + self.verify_basic(vertex, params) - self.verify(vertex, reject_locked_reward=reject_locked_reward) + self.verify(vertex, params) validation = ValidationState.CHECKPOINT_FULL if sync_checkpoints else ValidationState.FULL vertex.set_validation(validation) return True - def verify_basic(self, vertex: BaseTransaction, *, skip_block_weight_verification: bool = False) -> None: + def verify_basic( + self, + vertex: BaseTransaction, + params: VerificationParams, + ) -> None: """Basic verifications (the ones without access to dependencies: parents+inputs). Raises on error. Used by `self.validate_basic`. 
Should not modify the validation state.""" - self.verifiers.vertex.verify_version(vertex) + self.verifiers.vertex.verify_version_basic(vertex) # We assert with type() instead of isinstance() because each subclass has a specific branch. match vertex.version: case TxVersion.REGULAR_BLOCK: assert type(vertex) is Block - self._verify_basic_block(vertex, skip_weight_verification=skip_block_weight_verification) + self._verify_basic_block(vertex, params) case TxVersion.MERGE_MINED_BLOCK: assert type(vertex) is MergeMinedBlock - self._verify_basic_merge_mined_block(vertex, skip_weight_verification=skip_block_weight_verification) + self._verify_basic_merge_mined_block(vertex, params) case TxVersion.POA_BLOCK: assert type(vertex) is PoaBlock self._verify_basic_poa_block(vertex) case TxVersion.REGULAR_TRANSACTION: assert type(vertex) is Transaction - self._verify_basic_tx(vertex) + self._verify_basic_tx(vertex, params) case TxVersion.TOKEN_CREATION_TRANSACTION: assert type(vertex) is TokenCreationTransaction - self._verify_basic_token_creation_tx(vertex) + self._verify_basic_token_creation_tx(vertex, params) + case TxVersion.ON_CHAIN_BLUEPRINT: + assert type(vertex) is OnChainBlueprint + assert self._settings.ENABLE_NANO_CONTRACTS + self._verify_basic_on_chain_blueprint(vertex, params) case _: assert_never(vertex.version) - def _verify_basic_block(self, block: Block, *, skip_weight_verification: bool) -> None: + if vertex.is_nano_contract(): + assert self._settings.ENABLE_NANO_CONTRACTS + # nothing to do + + def _verify_basic_block(self, block: Block, params: VerificationParams) -> None: """Partially run validations, the ones that need parents/inputs are skipped.""" - if not skip_weight_verification: + if not params.skip_block_weight_verification: self.verifiers.block.verify_weight(block) self.verifiers.block.verify_reward(block) - def _verify_basic_merge_mined_block(self, block: MergeMinedBlock, *, skip_weight_verification: bool) -> None: - self._verify_basic_block(block, 
skip_weight_verification=skip_weight_verification) + def _verify_basic_merge_mined_block(self, block: MergeMinedBlock, params: VerificationParams) -> None: + self._verify_basic_block(block, params) def _verify_basic_poa_block(self, block: PoaBlock) -> None: self.verifiers.poa_block.verify_poa(block) self.verifiers.block.verify_reward(block) - def _verify_basic_tx(self, tx: Transaction) -> None: + def _verify_basic_tx(self, tx: Transaction, params: VerificationParams) -> None: """Partially run validations, the ones that need parents/inputs are skipped.""" if tx.is_genesis: # TODO do genesis validation? @@ -140,37 +153,50 @@ def _verify_basic_tx(self, tx: Transaction) -> None: self.verifiers.tx.verify_parents_basic(tx) if self._settings.CONSENSUS_ALGORITHM.is_pow(): self.verifiers.tx.verify_weight(tx) - self.verify_without_storage(tx) + self.verify_without_storage(tx, params) - def _verify_basic_token_creation_tx(self, tx: TokenCreationTransaction) -> None: - self._verify_basic_tx(tx) + def _verify_basic_token_creation_tx(self, tx: TokenCreationTransaction, params: VerificationParams) -> None: + self._verify_basic_tx(tx, params) - def verify(self, vertex: BaseTransaction, *, reject_locked_reward: bool = True) -> None: + def _verify_basic_on_chain_blueprint(self, tx: OnChainBlueprint, params: VerificationParams) -> None: + self._verify_basic_tx(tx, params) + + def verify(self, vertex: BaseTransaction, params: VerificationParams) -> None: """Run all verifications. Raises on error. Used by `self.validate_full`. Should not modify the validation state.""" + self.verifiers.vertex.verify_headers(vertex) + # We assert with type() instead of isinstance() because each subclass has a specific branch. 
match vertex.version: case TxVersion.REGULAR_BLOCK: assert type(vertex) is Block - self._verify_block(vertex) + self._verify_block(vertex, params) case TxVersion.MERGE_MINED_BLOCK: assert type(vertex) is MergeMinedBlock - self._verify_merge_mined_block(vertex) + self._verify_merge_mined_block(vertex, params) case TxVersion.POA_BLOCK: assert type(vertex) is PoaBlock - self._verify_poa_block(vertex) + self._verify_poa_block(vertex, params) case TxVersion.REGULAR_TRANSACTION: assert type(vertex) is Transaction - self._verify_tx(vertex, reject_locked_reward=reject_locked_reward) + self._verify_tx(vertex, params) case TxVersion.TOKEN_CREATION_TRANSACTION: assert type(vertex) is TokenCreationTransaction - self._verify_token_creation_tx(vertex, reject_locked_reward=reject_locked_reward) + self._verify_token_creation_tx(vertex, params) + case TxVersion.ON_CHAIN_BLUEPRINT: + assert type(vertex) is OnChainBlueprint + # TODO: on-chain blueprint verifications + self._verify_tx(vertex, params) case _: assert_never(vertex.version) + if vertex.is_nano_contract(): + assert self._settings.ENABLE_NANO_CONTRACTS + # nothing to do + @cpu.profiler(key=lambda _, block: 'block-verify!{}'.format(block.hash.hex())) - def _verify_block(self, block: Block) -> None: + def _verify_block(self, block: Block, params: VerificationParams) -> None: """ (1) confirms at least two pending transactions and references last block (2) solves the pow with the correct weight (done in HathorManager) @@ -184,7 +210,7 @@ def _verify_block(self, block: Block) -> None: # TODO do genesis validation return - self.verify_without_storage(block) + self.verify_without_storage(block, params) # (1) and (4) self.verifiers.vertex.verify_parents(block) @@ -193,19 +219,19 @@ def _verify_block(self, block: Block) -> None: self.verifiers.block.verify_mandatory_signaling(block) - def _verify_merge_mined_block(self, block: MergeMinedBlock) -> None: + def _verify_merge_mined_block(self, block: MergeMinedBlock, params: 
VerificationParams) -> None: self.verifiers.merge_mined_block.verify_aux_pow(block) - self._verify_block(block) + self._verify_block(block, params) - def _verify_poa_block(self, block: PoaBlock) -> None: - self._verify_block(block) + def _verify_poa_block(self, block: PoaBlock, params: VerificationParams) -> None: + self._verify_block(block, params) @cpu.profiler(key=lambda _, tx: 'tx-verify!{}'.format(tx.hash.hex())) def _verify_tx( self, tx: Transaction, + params: VerificationParams, *, - reject_locked_reward: bool, token_dict: dict[TokenUid, TokenInfo] | None = None ) -> None: """ Common verification for all transactions: @@ -222,65 +248,73 @@ def _verify_tx( if tx.is_genesis: # TODO do genesis validation return - self.verify_without_storage(tx) - self.verifiers.tx.verify_sigops_input(tx) + self.verify_without_storage(tx, params) + self.verifiers.tx.verify_sigops_input(tx, params.enable_checkdatasig_count) self.verifiers.tx.verify_inputs(tx) # need to run verify_inputs first to check if all inputs exist - self.verifiers.vertex.verify_parents(tx) self.verifiers.tx.verify_sum(token_dict or tx.get_complete_token_info()) - if reject_locked_reward: + self.verifiers.tx.verify_version(tx) + self.verifiers.vertex.verify_parents(tx) + if params.reject_locked_reward: self.verifiers.tx.verify_reward_locked(tx) - def _verify_token_creation_tx(self, tx: TokenCreationTransaction, *, reject_locked_reward: bool) -> None: + def _verify_token_creation_tx(self, tx: TokenCreationTransaction, params: VerificationParams) -> None: """ Run all validations as regular transactions plus validation on token info. 
We also overload verify_sum to make some different checks """ token_dict = tx.get_complete_token_info() - self._verify_tx(tx, reject_locked_reward=reject_locked_reward, token_dict=token_dict) + self._verify_tx(tx, params, token_dict=token_dict) self.verifiers.token_creation_tx.verify_minted_tokens(tx, token_dict) self.verifiers.token_creation_tx.verify_token_info(tx) - def verify_without_storage(self, vertex: BaseTransaction) -> None: + def verify_without_storage(self, vertex: BaseTransaction, params: VerificationParams) -> None: # We assert with type() instead of isinstance() because each subclass has a specific branch. match vertex.version: case TxVersion.REGULAR_BLOCK: assert type(vertex) is Block - self._verify_without_storage_block(vertex) + self._verify_without_storage_block(vertex, params) case TxVersion.MERGE_MINED_BLOCK: assert type(vertex) is MergeMinedBlock - self._verify_without_storage_merge_mined_block(vertex) + self._verify_without_storage_merge_mined_block(vertex, params) case TxVersion.POA_BLOCK: assert type(vertex) is PoaBlock - self._verify_without_storage_poa_block(vertex) + self._verify_without_storage_poa_block(vertex, params) case TxVersion.REGULAR_TRANSACTION: assert type(vertex) is Transaction - self._verify_without_storage_tx(vertex) + self._verify_without_storage_tx(vertex, params) case TxVersion.TOKEN_CREATION_TRANSACTION: assert type(vertex) is TokenCreationTransaction - self._verify_without_storage_token_creation_tx(vertex) + self._verify_without_storage_token_creation_tx(vertex, params) + case TxVersion.ON_CHAIN_BLUEPRINT: + assert type(vertex) is OnChainBlueprint + self._verify_without_storage_on_chain_blueprint(vertex, params) case _: assert_never(vertex.version) - def _verify_without_storage_base_block(self, block: Block) -> None: + if vertex.is_nano_contract(): + assert self._settings.ENABLE_NANO_CONTRACTS + self._verify_without_storage_nano_header(vertex, params) + + def _verify_without_storage_base_block(self, block: Block, 
params: VerificationParams) -> None: self.verifiers.block.verify_no_inputs(block) self.verifiers.vertex.verify_outputs(block) self.verifiers.block.verify_output_token_indexes(block) self.verifiers.block.verify_data(block) - self.verifiers.vertex.verify_sigops_output(block) + self.verifiers.vertex.verify_sigops_output(block, params.enable_checkdatasig_count) - def _verify_without_storage_block(self, block: Block) -> None: + def _verify_without_storage_block(self, block: Block, params: VerificationParams) -> None: """ Run all verifications that do not need a storage. """ self.verifiers.vertex.verify_pow(block) - self._verify_without_storage_base_block(block) + self._verify_without_storage_base_block(block, params) - def _verify_without_storage_merge_mined_block(self, block: MergeMinedBlock) -> None: - self._verify_without_storage_block(block) + def _verify_without_storage_merge_mined_block(self, block: MergeMinedBlock, params: VerificationParams) -> None: + self._verify_without_storage_block(block, params) - def _verify_without_storage_poa_block(self, block: PoaBlock) -> None: - self._verify_without_storage_base_block(block) + def _verify_without_storage_poa_block(self, block: PoaBlock, params: VerificationParams) -> None: + self._verify_without_storage_base_block(block, params) - def _verify_without_storage_tx(self, tx: Transaction) -> None: + def _verify_without_storage_tx(self, tx: Transaction, params: VerificationParams) -> None: """ Run all verifications that do not need a storage. 
""" if self._settings.CONSENSUS_ALGORITHM.is_pow(): @@ -288,7 +322,26 @@ def _verify_without_storage_tx(self, tx: Transaction) -> None: self.verifiers.tx.verify_number_of_inputs(tx) self.verifiers.vertex.verify_outputs(tx) self.verifiers.tx.verify_output_token_indexes(tx) - self.verifiers.vertex.verify_sigops_output(tx) + self.verifiers.vertex.verify_sigops_output(tx, params.enable_checkdatasig_count) + + def _verify_without_storage_token_creation_tx( + self, + tx: TokenCreationTransaction, + params: VerificationParams, + ) -> None: + self._verify_without_storage_tx(tx, params) + + def _verify_without_storage_nano_header(self, tx: BaseTransaction, params: VerificationParams) -> None: + assert tx.is_nano_contract() + self.verifiers.nano_header.verify_nc_signature(tx) + self.verifiers.nano_header.verify_actions(tx) - def _verify_without_storage_token_creation_tx(self, tx: TokenCreationTransaction) -> None: - self._verify_without_storage_tx(tx) + def _verify_without_storage_on_chain_blueprint( + self, + tx: OnChainBlueprint, + params: VerificationParams, + ) -> None: + self._verify_without_storage_tx(tx, params) + self.verifiers.on_chain_blueprint.verify_pubkey_is_allowed(tx) + self.verifiers.on_chain_blueprint.verify_nc_signature(tx) + self.verifiers.on_chain_blueprint.verify_code(tx) diff --git a/hathor/verification/vertex_verifier.py b/hathor/verification/vertex_verifier.py index 0e4282410..546158aee 100644 --- a/hathor/verification/vertex_verifier.py +++ b/hathor/verification/vertex_verifier.py @@ -14,10 +14,15 @@ from typing import Optional -from hathor.conf.settings import HathorSettings -from hathor.transaction import BaseTransaction +from typing_extensions import assert_never + +from hathor.conf.settings import HathorSettings, NanoContractsSetting +from hathor.feature_activation.feature import Feature +from hathor.feature_activation.feature_service import FeatureService +from hathor.transaction import BaseTransaction, TxVersion from 
hathor.transaction.exceptions import ( DuplicatedParents, + HeaderNotSupported, IncorrectParents, InvalidOutputScriptSize, InvalidOutputValue, @@ -26,9 +31,11 @@ ParentDoesNotExist, PowError, TimestampError, + TooManyHeaders, TooManyOutputs, TooManySigOps, ) +from hathor.transaction.headers import NanoHeader, VertexBaseHeader # tx should have 2 parents, both other transactions _TX_PARENTS_TXS = 2 @@ -40,14 +47,15 @@ class VertexVerifier: - __slots__ = ('_settings',) + __slots__ = ('_settings', '_feature_service',) - def __init__(self, *, settings: HathorSettings) -> None: + def __init__(self, *, settings: HathorSettings, feature_service: FeatureService): self._settings = settings + self._feature_service = feature_service - def verify_version(self, vertex: BaseTransaction) -> None: + def verify_version_basic(self, vertex: BaseTransaction) -> None: """Verify that the vertex version is valid.""" - if not self._settings.CONSENSUS_ALGORITHM.is_vertex_version_valid(vertex.version): + if not self._settings.CONSENSUS_ALGORITHM.is_vertex_version_valid(vertex.version, settings=self._settings): raise InvalidVersionError(f"invalid vertex version: {vertex.version}") def verify_parents(self, vertex: BaseTransaction) -> None: @@ -167,15 +175,61 @@ def verify_number_of_outputs(self, vertex: BaseTransaction) -> None: if len(vertex.outputs) > self._settings.MAX_NUM_OUTPUTS: raise TooManyOutputs('Maximum number of outputs exceeded') - def verify_sigops_output(self, vertex: BaseTransaction) -> None: + def verify_sigops_output(self, vertex: BaseTransaction, enable_checkdatasig_count: bool = True) -> None: """ Count sig operations on all outputs and verify that the total sum is below the limit """ - from hathor.transaction.scripts import get_sigops_count + from hathor.transaction.scripts import SigopCounter + + max_multisig_pubkeys = self._settings.MAX_MULTISIG_PUBKEYS + counter = SigopCounter( + max_multisig_pubkeys=max_multisig_pubkeys, + 
enable_checkdatasig_count=enable_checkdatasig_count, + ) + n_txops = 0 for tx_output in vertex.outputs: - n_txops += get_sigops_count(tx_output.script) + n_txops += counter.get_sigops_count(tx_output.script) if n_txops > self._settings.MAX_TX_SIGOPS_OUTPUT: raise TooManySigOps('TX[{}]: Maximum number of sigops for all outputs exceeded ({})'.format( vertex.hash_hex, n_txops)) + + def get_allowed_headers(self, vertex: BaseTransaction) -> set[type[VertexBaseHeader]]: + """Return a set of allowed headers for the vertex.""" + allowed_headers: set[type[VertexBaseHeader]] = set() + match vertex.version: + case TxVersion.REGULAR_BLOCK: + pass + case TxVersion.MERGE_MINED_BLOCK: + pass + case TxVersion.POA_BLOCK: + pass + case TxVersion.ON_CHAIN_BLUEPRINT: + pass + case TxVersion.REGULAR_TRANSACTION | TxVersion.TOKEN_CREATION_TRANSACTION: + match self._settings.ENABLE_NANO_CONTRACTS: + case NanoContractsSetting.DISABLED: + pass + case NanoContractsSetting.ENABLED: + allowed_headers.add(NanoHeader) + case NanoContractsSetting.FEATURE_ACTIVATION: + if self._feature_service.is_feature_active(vertex=vertex, feature=Feature.NANO_CONTRACTS): + allowed_headers.add(NanoHeader) + case _ as unreachable: + assert_never(unreachable) + case _: + assert_never(vertex.version) + return allowed_headers + + def verify_headers(self, vertex: BaseTransaction) -> None: + """Verify the headers.""" + if len(vertex.headers) > vertex.get_maximum_number_of_headers(): + raise TooManyHeaders('Maximum number of headers exceeded') + + allowed_headers = self.get_allowed_headers(vertex) + for header in vertex.headers: + if type(header) not in allowed_headers: + raise HeaderNotSupported( + f'Header `{type(header).__name__}` not supported by `{type(vertex).__name__}`' + ) diff --git a/hathor/verification/vertex_verifiers.py b/hathor/verification/vertex_verifiers.py index 1a9b56b21..327e9cbc9 100644 --- a/hathor/verification/vertex_verifiers.py +++ b/hathor/verification/vertex_verifiers.py @@ -19,6 +19,8 @@ 
from hathor.feature_activation.feature_service import FeatureService from hathor.verification.block_verifier import BlockVerifier from hathor.verification.merge_mined_block_verifier import MergeMinedBlockVerifier +from hathor.verification.nano_header_verifier import NanoHeaderVerifier +from hathor.verification.on_chain_blueprint_verifier import OnChainBlueprintVerifier from hathor.verification.poa_block_verifier import PoaBlockVerifier from hathor.verification.token_creation_transaction_verifier import TokenCreationTransactionVerifier from hathor.verification.transaction_verifier import TransactionVerifier @@ -33,6 +35,8 @@ class VertexVerifiers(NamedTuple): poa_block: PoaBlockVerifier tx: TransactionVerifier token_creation_tx: TokenCreationTransactionVerifier + nano_header: NanoHeaderVerifier + on_chain_blueprint: OnChainBlueprintVerifier @classmethod def create_defaults( @@ -46,7 +50,7 @@ def create_defaults( Create a VertexVerifiers instance using the default verifier for each vertex type, from all required dependencies. 
""" - vertex_verifier = VertexVerifier(settings=settings) + vertex_verifier = VertexVerifier(settings=settings, feature_service=feature_service) return cls.create( settings=settings, @@ -70,8 +74,10 @@ def create( block_verifier = BlockVerifier(settings=settings, daa=daa, feature_service=feature_service) merge_mined_block_verifier = MergeMinedBlockVerifier(settings=settings, feature_service=feature_service) poa_block_verifier = PoaBlockVerifier(settings=settings) - tx_verifier = TransactionVerifier(settings=settings, daa=daa) + tx_verifier = TransactionVerifier(settings=settings, daa=daa, feature_service=feature_service) token_creation_tx_verifier = TokenCreationTransactionVerifier(settings=settings) + nano_header_verifier = NanoHeaderVerifier(settings=settings) + on_chain_blueprint_verifier = OnChainBlueprintVerifier(settings=settings) return VertexVerifiers( vertex=vertex_verifier, @@ -80,4 +86,6 @@ def create( poa_block=poa_block_verifier, tx=tx_verifier, token_creation_tx=token_creation_tx_verifier, + nano_header=nano_header_verifier, + on_chain_blueprint=on_chain_blueprint_verifier, ) diff --git a/hathor/version.py b/hathor/version.py index a89edf332..cea619850 100644 --- a/hathor/version.py +++ b/hathor/version.py @@ -19,7 +19,7 @@ from structlog import get_logger -BASE_VERSION = '0.63.1' +BASE_VERSION = '0.64.0' DEFAULT_VERSION_SUFFIX = "local" BUILD_VERSION_FILE_PATH = "./BUILD_VERSION" diff --git a/hathor/version_resource.py b/hathor/version_resource.py index 57d2801f2..a0942e2cb 100644 --- a/hathor/version_resource.py +++ b/hathor/version_resource.py @@ -43,6 +43,7 @@ def render_GET(self, request): data = { 'version': hathor.__version__, 'network': self.manager.network, + 'nano_contracts_enabled': self._settings.ENABLE_NANO_CONTRACTS, 'min_weight': self._settings.MIN_TX_WEIGHT, # DEPRECATED 'min_tx_weight': self._settings.MIN_TX_WEIGHT, 'min_tx_weight_coefficient': self._settings.MIN_TX_WEIGHT_COEFFICIENT, diff --git 
a/hathor/vertex_handler/vertex_handler.py b/hathor/vertex_handler/vertex_handler.py index f1b836444..d34131d7e 100644 --- a/hathor/vertex_handler/vertex_handler.py +++ b/hathor/vertex_handler/vertex_handler.py @@ -13,20 +13,26 @@ # limitations under the License. import datetime +from dataclasses import replace +from typing import Any, Generator from structlog import get_logger +from twisted.internet.defer import inlineCallbacks +from twisted.internet.task import deferLater from hathor.conf.settings import HathorSettings from hathor.consensus import ConsensusAlgorithm from hathor.exception import HathorError, InvalidNewTransaction from hathor.execution_manager import ExecutionManager +from hathor.feature_activation.feature import Feature from hathor.feature_activation.feature_service import FeatureService from hathor.profiler import get_cpu_profiler from hathor.pubsub import HathorEvents, PubSubManager from hathor.reactor import ReactorProtocol -from hathor.transaction import BaseTransaction, Block +from hathor.transaction import BaseTransaction, Block, Transaction from hathor.transaction.storage import TransactionStorage from hathor.transaction.storage.exceptions import TransactionDoesNotExist +from hathor.verification.verification_params import VerificationParams from hathor.verification.verification_service import VerificationService from hathor.wallet import BaseWallet @@ -75,37 +81,67 @@ def __init__( self._wallet = wallet self._log_vertex_bytes = log_vertex_bytes - @cpu.profiler('on_new_vertex') - def on_new_vertex( + @cpu.profiler('on_new_block') + @inlineCallbacks + def on_new_block(self, block: Block, *, deps: list[Transaction]) -> Generator[Any, Any, bool]: + parent_block_hash = block.get_block_parent_hash() + parent_block = self._tx_storage.get_block(parent_block_hash) + + enable_checkdatasig_count = self._feature_service.is_feature_active( + vertex=parent_block, + feature=Feature.COUNT_CHECKDATASIG_OP + ) + params = 
VerificationParams(enable_checkdatasig_count=enable_checkdatasig_count) + + for tx in deps: + if not self._tx_storage.transaction_exists(tx.hash): + if not self._old_on_new_vertex(tx, params): + return False + yield deferLater(self._reactor, 0, lambda: None) + + if not self._tx_storage.transaction_exists(block.hash): + if not self._old_on_new_vertex(block, params): + return False + + return True + + @cpu.profiler('on_new_mempool_transaction') + def on_new_mempool_transaction(self, tx: Transaction) -> bool: + params = VerificationParams.default_for_mempool() + return self._old_on_new_vertex(tx, params) + + @cpu.profiler('on_new_relayed_vertex') + def on_new_relayed_vertex( self, vertex: BaseTransaction, *, quiet: bool = False, - fails_silently: bool = True, - reject_locked_reward: bool = True, + ) -> bool: + # XXX: checkdatasig enabled for relayed vertices + params = VerificationParams.default_for_mempool() + return self._old_on_new_vertex(vertex, params, quiet=quiet) + + @cpu.profiler('_old_on_new_vertex') + def _old_on_new_vertex( + self, + vertex: BaseTransaction, + params: VerificationParams, + *, + quiet: bool = False, ) -> bool: """ New method for adding transactions or blocks that steps the validation state machine. 
:param vertex: transaction to be added :param quiet: if True will not log when a new tx is accepted - :param fails_silently: if False will raise an exception when tx cannot be added """ - is_valid = self._validate_vertex( - vertex, - fails_silently=fails_silently, - reject_locked_reward=reject_locked_reward - ) + is_valid = self._validate_vertex(vertex, params) if not is_valid: return False try: self._unsafe_save_and_run_consensus(vertex) - self._post_consensus( - vertex, - quiet=quiet, - reject_locked_reward=reject_locked_reward - ) + self._post_consensus(vertex, params, quiet=quiet) except BaseException: self._log.error('unexpected exception in on_new_vertex()', vertex=vertex) meta = vertex.get_metadata() @@ -115,13 +151,7 @@ def on_new_vertex( return True - def _validate_vertex( - self, - vertex: BaseTransaction, - *, - fails_silently: bool, - reject_locked_reward: bool, - ) -> bool: + def _validate_vertex(self, vertex: BaseTransaction, params: VerificationParams) -> bool: assert self._tx_storage.is_only_valid_allowed() already_exists = False if self._tx_storage.transaction_exists(vertex.hash): @@ -129,43 +159,27 @@ def _validate_vertex( already_exists = True if vertex.timestamp - self._reactor.seconds() > self._settings.MAX_FUTURE_TIMESTAMP_ALLOWED: - if not fails_silently: - raise InvalidNewTransaction('Ignoring transaction in the future {} (timestamp={})'.format( - vertex.hash_hex, vertex.timestamp)) - self._log.warn('on_new_tx(): Ignoring transaction in the future', tx=vertex.hash_hex, - future_timestamp=vertex.timestamp) - return False + raise InvalidNewTransaction('Ignoring transaction in the future {} (timestamp={})'.format( + vertex.hash_hex, vertex.timestamp)) vertex.storage = self._tx_storage try: metadata = vertex.get_metadata() except TransactionDoesNotExist: - if not fails_silently: - raise InvalidNewTransaction('cannot get metadata') - self._log.warn('on_new_tx(): cannot get metadata', tx=vertex.hash_hex) - return False + raise 
InvalidNewTransaction('cannot get metadata') if already_exists and metadata.validation.is_fully_connected(): - if not fails_silently: - raise InvalidNewTransaction('Transaction already exists {}'.format(vertex.hash_hex)) - self._log.warn('on_new_tx(): Transaction already exists', tx=vertex.hash_hex) - return False + raise InvalidNewTransaction('Transaction already exists {}'.format(vertex.hash_hex)) if metadata.validation.is_invalid(): - if not fails_silently: - raise InvalidNewTransaction('previously marked as invalid') - self._log.warn('on_new_tx(): previously marked as invalid', tx=vertex.hash_hex) - return False + raise InvalidNewTransaction('previously marked as invalid') if not metadata.validation.is_fully_connected(): try: - self._verification_service.validate_full(vertex, reject_locked_reward=reject_locked_reward) + self._verification_service.validate_full(vertex, params) except HathorError as e: - if not fails_silently: - raise InvalidNewTransaction(f'full validation failed: {str(e)}') from e - self._log.warn('on_new_tx(): full validation failed', tx=vertex.hash_hex, exc_info=True) - return False + raise InvalidNewTransaction(f'full validation failed: {str(e)}') from e return True @@ -186,20 +200,21 @@ def _unsafe_save_and_run_consensus(self, vertex: BaseTransaction) -> None: def _post_consensus( self, vertex: BaseTransaction, + params: VerificationParams, *, quiet: bool, - reject_locked_reward: bool, ) -> None: """ Handle operations that need to happen once the tx becomes fully validated. This might happen immediately after we receive the tx, if we have all dependencies already. Or it might happen later. 
""" + # XXX: during post consensus we don't need to verify weights again, so we can disable it + params = replace(params, skip_block_weight_verification=True) assert self._tx_storage.indexes is not None assert self._verification_service.validate_full( vertex, - skip_block_weight_verification=True, - reject_locked_reward=reject_locked_reward, + params, init_static_metadata=False, ) self._tx_storage.indexes.update(vertex) @@ -237,12 +252,21 @@ def _log_new_object(self, tx: BaseTransaction, message_fmt: str, *, quiet: bool) if self._log_vertex_bytes: kwargs['bytes'] = bytes(tx).hex() if isinstance(tx, Block): - message = message_fmt.format('block') + if not metadata.voided_by: + message = message_fmt.format('block') + else: + message = message_fmt.format('voided block') kwargs['_height'] = tx.get_height() else: - message = message_fmt.format('tx') + if not metadata.voided_by: + message = message_fmt.format('tx') + else: + message = message_fmt.format('voided tx') if not quiet: log_func = self._log.info else: log_func = self._log.debug + + if tx.name: + kwargs['__name'] = tx.name log_func(message, **kwargs) diff --git a/hathor/wallet/keypair.py b/hathor/wallet/keypair.py index d526e1c48..82d1a211d 100644 --- a/hathor/wallet/keypair.py +++ b/hathor/wallet/keypair.py @@ -91,6 +91,20 @@ def to_json(self) -> dict[str, Any]: 'used': self.used, } + def p2pkh_create_input_data(self, password: bytes, data: bytes) -> bytes: + """Return a script input to solve the p2pkh script generated by this key pair.""" + from cryptography.hazmat.primitives import hashes + + from hathor.crypto.util import get_public_key_bytes_compressed + from hathor.transaction.scripts import P2PKH + + private_key = self.get_private_key(password) + public_key = private_key.public_key() + public_key_bytes = get_public_key_bytes_compressed(public_key) + signature = private_key.sign(data, ec.ECDSA(hashes.SHA256())) + script_input = P2PKH.create_input_data(public_key_bytes, signature) + return script_input + 
@classmethod def from_json(cls, json_data: dict[str, Any]) -> 'KeyPair': priv_key_bytes = base64.b64decode(json_data['privKey']) diff --git a/hathor/wallet/resources/send_tokens.py b/hathor/wallet/resources/send_tokens.py index 2d3cc7492..b78e44871 100644 --- a/hathor/wallet/resources/send_tokens.py +++ b/hathor/wallet/resources/send_tokens.py @@ -26,6 +26,7 @@ from hathor.transaction import Transaction from hathor.transaction.exceptions import TxValidationError from hathor.util import json_dumpb, json_loadb +from hathor.verification.verification_params import VerificationParams from hathor.wallet.base_wallet import WalletInputInfo, WalletOutputInfo from hathor.wallet.exceptions import InputDuplicated, InsufficientFunds, InvalidAddress, PrivateKeyNotFound @@ -42,6 +43,7 @@ def __init__(self, manager: HathorManager, settings: HathorSettings) -> None: # Important to have the manager so we can know the tx_storage self.manager = manager self._settings = settings + self.params = VerificationParams.default_for_mempool() def render_POST(self, request): """ POST request for /wallet/send_tokens/ @@ -132,7 +134,7 @@ def _render_POST_thread(self, values: dict[str, Any], request: Request) -> Union tx.weight = weight self.manager.cpu_mining_service.resolve(tx) tx.init_static_metadata_from_storage(self._settings, self.manager.tx_storage) - self.manager.verification_service.verify(tx) + self.manager.verification_service.verify(tx, self.params) return tx def _cb_tx_resolve(self, tx, request): @@ -140,7 +142,7 @@ def _cb_tx_resolve(self, tx, request): """ message = '' try: - success = self.manager.propagate_tx(tx, fails_silently=False) + success = self.manager.propagate_tx(tx) except (InvalidNewTransaction, TxValidationError) as e: success = False message = str(e) diff --git a/hathor/wallet/resources/thin_wallet/address_history.py b/hathor/wallet/resources/thin_wallet/address_history.py index db0e2c221..4fc8fabf8 100644 --- a/hathor/wallet/resources/thin_wallet/address_history.py 
+++ b/hathor/wallet/resources/thin_wallet/address_history.py @@ -71,7 +71,7 @@ def render_POST(self, request: Request) -> bytes: addresses = post_data['addresses'] assert isinstance(addresses, list) - return self.get_address_history(addresses, post_data.get('hash')) + return self.get_address_history(addresses, post_data.get('hash'), post_data.get('tx_version')) def render_GET(self, request: Request) -> bytes: """ GET request for /thin_wallet/address_history/ @@ -135,7 +135,17 @@ def render_GET(self, request: Request) -> bytes: # If hash parameter is in the request, it must be a valid hex ref_hash = raw_args[b'hash'][0].decode('utf-8') - return self.get_address_history([address.decode('utf-8') for address in addresses], ref_hash) + allowed_tx_versions_arg = raw_args.get(b'tx_version[]', None) + allowed_tx_versions = ( + set([int(tx_version.decode('utf-8')) for tx_version in allowed_tx_versions_arg]) + if allowed_tx_versions_arg is not None + else None + ) + return self.get_address_history( + [address.decode('utf-8') for address in addresses], + ref_hash, + allowed_tx_versions + ) def _validate_index(self, request: Request) -> bytes | None: """Return None if validation is successful (addresses index is enabled), and an error message otherwise.""" @@ -149,7 +159,10 @@ def _validate_index(self, request: Request) -> bytes | None: request.setResponseCode(503) return json_dumpb({'success': False, 'message': 'wallet index is disabled'}) - def get_address_history(self, addresses: list[str], ref_hash: Optional[str]) -> bytes: + def get_address_history(self, + addresses: list[str], + ref_hash: Optional[str], + allowed_tx_versions: Optional[set[int]]) -> bytes: ref_hash_bytes = None if ref_hash: try: @@ -209,6 +222,10 @@ def get_address_history(self, addresses: list[str], ref_hash: Optional[str]) -> if tx_hash not in seen: tx = self.manager.tx_storage.get_transaction(tx_hash) + if allowed_tx_versions and tx.version not in allowed_tx_versions: + # Transaction version is not in 
the version filter + continue + tx_elements = len(tx.inputs) + len(tx.outputs) if total_elements + tx_elements > self.max_inputs_outputs_address_history: # If the adition of this tx overcomes the maximum number of inputs and outputs, then break @@ -277,6 +294,22 @@ def get_address_history(self, addresses: list[str], ref_hash: Optional[str]) -> 'type': 'string' } }, + { + 'name': 'hash', + 'in': 'query', + 'description': 'Hash used to paginate the request.', + 'schema': { + 'type': 'string' + } + }, + { + 'name': 'tx_version[]', + 'in': 'query', + 'description': 'List of versions to filter the transactions.', + 'schema': { + 'type': 'int' + } + }, ], 'responses': { '200': { diff --git a/hathor/wallet/resources/thin_wallet/send_tokens.py b/hathor/wallet/resources/thin_wallet/send_tokens.py index 0ab18f5b9..cd4076cca 100644 --- a/hathor/wallet/resources/thin_wallet/send_tokens.py +++ b/hathor/wallet/resources/thin_wallet/send_tokens.py @@ -31,6 +31,7 @@ from hathor.transaction import Transaction from hathor.transaction.exceptions import TxValidationError from hathor.util import json_dumpb, json_loadb +from hathor.verification.verification_params import VerificationParams logger = get_logger() @@ -60,6 +61,7 @@ def __init__(self, manager): self.sleep_seconds = 0 self.log = logger.new() self.reactor = get_global_reactor() + self.params = VerificationParams.default_for_mempool() def render_POST(self, request: Request) -> Any: """ POST request for /thin_wallet/send_tokens/ @@ -214,7 +216,7 @@ def _stratum_deferred_resolve(self, context: _Context) -> None: def _stratum_thread_verify(self, context: _Context) -> _Context: """ Method to verify the transaction that runs in a separated thread """ - self.manager.verification_service.verify(context.tx) + self.manager.verification_service.verify(context.tx, self.params) return context def _stratum_timeout(self, result: Failure, timeout: int, *, context: _Context) -> None: @@ -271,7 +273,7 @@ def _should_stop(): raise 
CancelledError() context.tx.update_hash() context.tx.init_static_metadata_from_storage(self._settings, self.manager.tx_storage) - self.manager.verification_service.verify(context.tx) + self.manager.verification_service.verify(context.tx, self.params) return context def _cb_tx_resolve(self, context: _Context) -> None: @@ -282,7 +284,7 @@ def _cb_tx_resolve(self, context: _Context) -> None: message = '' return_code = '' try: - success = self.manager.propagate_tx(tx, fails_silently=False) + success = self.manager.propagate_tx(tx) if success: return_code = 'success' else: diff --git a/hathor/wallet/resources/thin_wallet/tokens.py b/hathor/wallet/resources/thin_wallet/tokens.py index fcd29d476..babb81d29 100644 --- a/hathor/wallet/resources/thin_wallet/tokens.py +++ b/hathor/wallet/resources/thin_wallet/tokens.py @@ -21,6 +21,8 @@ from hathor.conf.get_settings import get_global_settings from hathor.util import is_token_uid_valid, json_dumpb +_MAX_UTXO_LIST_LENGTH: int = 100 + @register_resource class TokenResource(Resource): @@ -42,18 +44,22 @@ def get_one_token_data(self, token_uid: bytes) -> dict[str, Any]: except KeyError: return {'success': False, 'message': 'Unknown token'} - mint = [] - melt = [] + mint: list[dict[str, Any]] = [] + melt: list[dict[str, Any]] = [] transactions_count = tokens_index.get_transactions_count(token_uid) for tx_hash, index in token_info.iter_mint_utxos(): + if len(mint) >= _MAX_UTXO_LIST_LENGTH: + break mint.append({ 'tx_id': tx_hash.hex(), 'index': index }) for tx_hash, index in token_info.iter_melt_utxos(): + if len(melt) >= _MAX_UTXO_LIST_LENGTH: + break melt.append({ 'tx_id': tx_hash.hex(), 'index': index @@ -63,8 +69,12 @@ def get_one_token_data(self, token_uid: bytes) -> dict[str, Any]: 'name': token_info.get_name(), 'symbol': token_info.get_symbol(), 'success': True, + # XXX: mint and melt keys are deprecated and we should remove them from the API soon. + # They're a truncated list with up to _MAX_UTXO_LIST_LENGTH items. 
'mint': mint, 'melt': melt, + 'can_mint': token_info.can_mint(), + 'can_melt': token_info.can_melt(), 'total': token_info.get_total(), 'transactions_count': transactions_count, } @@ -200,6 +210,8 @@ def render_GET(self, request: Request) -> bytes: "index": 1 } ], + 'can_mint': True, + 'can_melt': True, 'total': 50000, 'transactions_count': 3, } diff --git a/hathor/websocket/factory.py b/hathor/websocket/factory.py index 212b327eb..d71b9dbea 100644 --- a/hathor/websocket/factory.py +++ b/hathor/websocket/factory.py @@ -141,7 +141,7 @@ def disable_history_streaming(self) -> None: """Disable history streaming for all connections.""" self.is_history_streaming_enabled = False for conn in self.connections: - self.disable_history_streaming() + conn.disable_history_streaming() def _setup_rate_limit(self): """ Set the limit of the RateLimiter and start the buffer deques with BUFFER_SIZE diff --git a/poetry.lock b/poetry.lock index 2ae3ad105..5ac42efdd 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. 
[[package]] name = "aiohappyeyeballs" @@ -6,6 +6,7 @@ version = "2.3.5" description = "Happy Eyeballs for asyncio" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "aiohappyeyeballs-2.3.5-py3-none-any.whl", hash = "sha256:4d6dea59215537dbc746e93e779caea8178c866856a721c9c660d7a5a7b8be03"}, {file = "aiohappyeyeballs-2.3.5.tar.gz", hash = "sha256:6fa48b9f1317254f122a07a131a86b71ca6946ca989ce6326fff54a99a920105"}, @@ -17,6 +18,7 @@ version = "3.10.3" description = "Async http client/server framework (asyncio)" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "aiohttp-3.10.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cc36cbdedf6f259371dbbbcaae5bb0e95b879bc501668ab6306af867577eb5db"}, {file = "aiohttp-3.10.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:85466b5a695c2a7db13eb2c200af552d13e6a9313d7fa92e4ffe04a2c0ea74c1"}, @@ -99,14 +101,13 @@ files = [ [package.dependencies] aiohappyeyeballs = ">=2.3.0" aiosignal = ">=1.1.2" -async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} attrs = ">=17.3.0" frozenlist = ">=1.1.1" multidict = ">=4.5,<7.0" yarl = ">=1.0,<2.0" [package.extras] -speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"] +speedups = ["Brotli ; platform_python_implementation == \"CPython\"", "aiodns (>=3.2.0) ; sys_platform == \"linux\" or sys_platform == \"darwin\"", "brotlicffi ; platform_python_implementation != \"CPython\""] [[package]] name = "aiosignal" @@ -114,6 +115,7 @@ version = "1.3.1" description = "aiosignal: a list of registered asynchronous callbacks" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, @@ -128,6 +130,8 @@ version = "0.1.3" description = "Disable App Nap on macOS 
>= 10.9" optional = false python-versions = "*" +groups = ["main"] +markers = "sys_platform == \"darwin\" or platform_system == \"Darwin\"" files = [ {file = "appnope-0.1.3-py2.py3-none-any.whl", hash = "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"}, {file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, @@ -139,6 +143,7 @@ version = "2.4.1" description = "Annotate AST trees with source code positions" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, @@ -148,19 +153,8 @@ files = [ six = ">=1.12.0" [package.extras] -astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] -test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] - -[[package]] -name = "async-timeout" -version = "4.0.3" -description = "Timeout context manager for asyncio programs" -optional = false -python-versions = ">=3.7" -files = [ - {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, - {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, -] +astroid = ["astroid (>=1,<2) ; python_version < \"3\"", "astroid (>=2,<4) ; python_version >= \"3\""] +test = ["astroid (>=1,<2) ; python_version < \"3\"", "astroid (>=2,<4) ; python_version >= \"3\"", "pytest"] [[package]] name = "attrs" @@ -168,6 +162,7 @@ version = "23.1.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, {file = "attrs-23.1.0.tar.gz", hash = 
"sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, @@ -178,7 +173,7 @@ cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] dev = ["attrs[docs,tests]", "pre-commit"] docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-no-zope = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.1.1) ; platform_python_implementation == \"CPython\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version < \"3.11\"", "pytest-xdist[psutil]"] [[package]] name = "autobahn" @@ -186,6 +181,7 @@ version = "24.4.2" description = "WebSocket client & server library, WAMP real-time framework" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "autobahn-24.4.2-py2.py3-none-any.whl", hash = "sha256:c56a2abe7ac78abbfb778c02892d673a4de58fd004d088cd7ab297db25918e81"}, {file = "autobahn-24.4.2.tar.gz", hash = "sha256:a2d71ef1b0cf780b6d11f8b205fd2c7749765e65795f2ea7d823796642ee92c9"}, @@ -198,13 +194,13 @@ setuptools = "*" txaio = ">=21.2.1" [package.extras] -all = ["PyGObject (>=3.40.0)", "argon2-cffi (>=20.1.0)", "attrs (>=20.3.0)", "base58 (>=2.1.0)", "bitarray (>=2.7.5)", "cbor2 (>=5.2.0)", "cffi (>=1.14.5)", "click (>=8.1.2)", "ecdsa (>=0.16.1)", "eth-abi (>=4.0.0)", "flatbuffers (>=22.12.6)", "hkdf (>=0.0.3)", "jinja2 (>=2.11.3)", "mnemonic (>=0.19)", "msgpack (>=1.0.2)", "passlib (>=1.7.4)", "py-ecc (>=5.1.0)", "py-eth-sig-utils (>=0.4.0)", "py-multihash (>=2.0.1)", "py-ubjson (>=0.16.1)", "pynacl (>=1.4.0)", "pyopenssl (>=20.0.1)", "python-snappy (>=0.6.0)", "pytrie (>=0.4.0)", "qrcode (>=7.3.1)", "rlp (>=2.0.1)", "service-identity (>=18.1.0)", "spake2 (>=0.8)", "twisted 
(>=20.3.0)", "twisted (>=24.3.0)", "u-msgpack-python (>=2.1)", "ujson (>=4.0.2)", "web3[ipfs] (>=6.0.0)", "xbr (>=21.2.1)", "yapf (==0.29.0)", "zlmdb (>=21.2.1)", "zope.interface (>=5.2.0)"] +all = ["PyGObject (>=3.40.0)", "argon2-cffi (>=20.1.0)", "attrs (>=20.3.0)", "base58 (>=2.1.0)", "bitarray (>=2.7.5)", "cbor2 (>=5.2.0)", "cffi (>=1.14.5)", "click (>=8.1.2)", "ecdsa (>=0.16.1)", "eth-abi (>=4.0.0)", "flatbuffers (>=22.12.6)", "hkdf (>=0.0.3)", "jinja2 (>=2.11.3)", "mnemonic (>=0.19)", "msgpack (>=1.0.2) ; platform_python_implementation == \"CPython\"", "passlib (>=1.7.4)", "py-ecc (>=5.1.0)", "py-eth-sig-utils (>=0.4.0)", "py-multihash (>=2.0.1)", "py-ubjson (>=0.16.1)", "pynacl (>=1.4.0)", "pyopenssl (>=20.0.1)", "python-snappy (>=0.6.0)", "pytrie (>=0.4.0)", "qrcode (>=7.3.1)", "rlp (>=2.0.1)", "service-identity (>=18.1.0)", "spake2 (>=0.8)", "twisted (>=20.3.0)", "twisted (>=24.3.0)", "u-msgpack-python (>=2.1) ; platform_python_implementation != \"CPython\"", "ujson (>=4.0.2) ; platform_python_implementation == \"CPython\"", "web3[ipfs] (>=6.0.0)", "xbr (>=21.2.1)", "yapf (==0.29.0)", "zlmdb (>=21.2.1)", "zope.interface (>=5.2.0)"] compress = ["python-snappy (>=0.6.0)"] -dev = ["backports.tempfile (>=1.0)", "build (>=1.2.1)", "bumpversion (>=0.5.3)", "codecov (>=2.0.15)", "flake8 (<5)", "humanize (>=0.5.1)", "mypy (>=0.610)", "passlib", "pep8-naming (>=0.3.3)", "pip (>=9.0.1)", "pyenchant (>=1.6.6)", "pyflakes (>=1.0.0)", "pyinstaller (>=4.2)", "pylint (>=1.9.2)", "pytest (>=3.4.2)", "pytest-aiohttp", "pytest-asyncio (>=0.14.0)", "pytest-runner (>=2.11.1)", "pyyaml (>=4.2b4)", "qualname", "sphinx (>=1.7.1)", "sphinx-autoapi (>=1.7.0)", "sphinx-rtd-theme (>=0.1.9)", "sphinxcontrib-images (>=0.9.1)", "tox (>=4.2.8)", "tox-gh-actions (>=2.2.0)", "twine (>=3.3.0)", "twisted (>=22.10.0)", "txaio (>=20.4.1)", "watchdog (>=0.8.3)", "wheel (>=0.36.2)", "yapf (==0.29.0)"] +dev = ["backports.tempfile (>=1.0)", "build (>=1.2.1)", "bumpversion (>=0.5.3)", "codecov 
(>=2.0.15)", "flake8 (<5)", "humanize (>=0.5.1)", "mypy (>=0.610) ; python_version >= \"3.4\" and platform_python_implementation != \"PyPy\"", "passlib", "pep8-naming (>=0.3.3)", "pip (>=9.0.1)", "pyenchant (>=1.6.6)", "pyflakes (>=1.0.0)", "pyinstaller (>=4.2)", "pylint (>=1.9.2)", "pytest (>=3.4.2)", "pytest-aiohttp", "pytest-asyncio (>=0.14.0)", "pytest-runner (>=2.11.1)", "pyyaml (>=4.2b4)", "qualname", "sphinx (>=1.7.1)", "sphinx-autoapi (>=1.7.0)", "sphinx-rtd-theme (>=0.1.9)", "sphinxcontrib-images (>=0.9.1)", "tox (>=4.2.8)", "tox-gh-actions (>=2.2.0)", "twine (>=3.3.0)", "twisted (>=22.10.0)", "txaio (>=20.4.1)", "watchdog (>=0.8.3)", "wheel (>=0.36.2)", "yapf (==0.29.0)"] encryption = ["pynacl (>=1.4.0)", "pyopenssl (>=20.0.1)", "pytrie (>=0.4.0)", "qrcode (>=7.3.1)", "service-identity (>=18.1.0)"] nvx = ["cffi (>=1.14.5)"] scram = ["argon2-cffi (>=20.1.0)", "cffi (>=1.14.5)", "passlib (>=1.7.4)"] -serialization = ["cbor2 (>=5.2.0)", "flatbuffers (>=22.12.6)", "msgpack (>=1.0.2)", "py-ubjson (>=0.16.1)", "u-msgpack-python (>=2.1)", "ujson (>=4.0.2)"] +serialization = ["cbor2 (>=5.2.0)", "flatbuffers (>=22.12.6)", "msgpack (>=1.0.2) ; platform_python_implementation == \"CPython\"", "py-ubjson (>=0.16.1)", "u-msgpack-python (>=2.1) ; platform_python_implementation != \"CPython\"", "ujson (>=4.0.2) ; platform_python_implementation == \"CPython\""] twisted = ["attrs (>=20.3.0)", "twisted (>=24.3.0)", "zope.interface (>=5.2.0)"] ui = ["PyGObject (>=3.40.0)"] xbr = ["base58 (>=2.1.0)", "bitarray (>=2.7.5)", "cbor2 (>=5.2.0)", "click (>=8.1.2)", "ecdsa (>=0.16.1)", "eth-abi (>=4.0.0)", "hkdf (>=0.0.3)", "jinja2 (>=2.11.3)", "mnemonic (>=0.19)", "py-ecc (>=5.1.0)", "py-eth-sig-utils (>=0.4.0)", "py-multihash (>=2.0.1)", "rlp (>=2.0.1)", "spake2 (>=0.8)", "twisted (>=20.3.0)", "web3[ipfs] (>=6.0.0)", "xbr (>=21.2.1)", "yapf (==0.29.0)", "zlmdb (>=21.2.1)"] @@ -215,6 +211,7 @@ version = "22.10.0" description = "Self-service finite-state machines for the programmer 
on the go." optional = false python-versions = "*" +groups = ["main"] files = [ {file = "Automat-22.10.0-py2.py3-none-any.whl", hash = "sha256:c3164f8742b9dc440f3682482d32aaff7bb53f71740dd018533f9de286b64180"}, {file = "Automat-22.10.0.tar.gz", hash = "sha256:e56beb84edad19dcc11d30e8d9b895f75deeb5ef5e96b84a467066b3b84bb04e"}, @@ -233,6 +230,7 @@ version = "0.2.0" description = "Specifications for callback functions passed in to an API" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, @@ -244,6 +242,7 @@ version = "2.1.1" description = "Base58 and Base58Check implementation." optional = false python-versions = ">=3.5" +groups = ["main"] files = [ {file = "base58-2.1.1-py3-none-any.whl", hash = "sha256:11a36f4d3ce51dfc1043f3218591ac4eb1ceb172919cebe05b52a5bcc8d245c2"}, {file = "base58-2.1.1.tar.gz", hash = "sha256:c5d0cb3f5b6e81e8e35da5754388ddcc6d0d14b6c6a132cb93d69ed580a7278c"}, @@ -258,6 +257,7 @@ version = "2023.11.17" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, @@ -269,6 +269,8 @@ version = "1.16.0" description = "Foreign Function Interface for Python calling C code." 
optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "platform_python_implementation != \"PyPy\" or implementation_name == \"pypy\"" files = [ {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, @@ -333,6 +335,7 @@ version = "2.1.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.6.0" +groups = ["main"] files = [ {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, @@ -347,6 +350,7 @@ version = "0.4.6" description = "Cross-platform colored terminal text." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main", "dev"] files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, @@ -358,6 +362,7 @@ version = "0.2.1" description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "comm-0.2.1-py3-none-any.whl", hash = "sha256:87928485c0dfc0e7976fd89fc1e187023cf587e7c353e4a9b417555b44adf021"}, {file = "comm-0.2.1.tar.gz", hash = "sha256:0bc91edae1344d39d3661dcbc36937181fdaddb304790458f8b044dbc064b89a"}, @@ -375,6 +380,7 @@ version = "1.5.5" description = "A drop-in replacement for argparse that allows options to also be set via config files and/or environment variables." optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["main"] files = [ {file = "ConfigArgParse-1.5.5-py3-none-any.whl", hash = "sha256:541360ddc1b15c517f95c0d02d1fca4591266628f3667acdc5d13dccc78884ca"}, {file = "ConfigArgParse-1.5.5.tar.gz", hash = "sha256:363d80a6d35614bd446e2f2b1b216f3b33741d03ac6d0a92803306f40e555b58"}, @@ -390,6 +396,7 @@ version = "23.10.4" description = "Symbolic constants in Python" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "constantly-23.10.4-py3-none-any.whl", hash = "sha256:3fd9b4d1c3dc1ec9757f3c52aef7e53ad9323dbe39f51dfd4c43853b68dfa3f9"}, {file = "constantly-23.10.4.tar.gz", hash = "sha256:aa92b70a33e2ac0bb33cd745eb61776594dc48764b06c35e0efd050b7f1c7cbd"}, @@ -401,6 +408,7 @@ version = "7.4.0" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "coverage-7.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36b0ea8ab20d6a7564e89cb6135920bc9188fb5f1f7152e94e8300b7b189441a"}, {file = "coverage-7.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0676cd0ba581e514b7f726495ea75aba3eb20899d824636c6f59b0ed2f88c471"}, @@ -456,11 +464,8 @@ files = [ {file = "coverage-7.4.0.tar.gz", hash = "sha256:707c0f58cb1712b8809ece32b68996ee1e609f71bd14615bd8f87a1293cb610e"}, ] -[package.dependencies] -tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} - 
[package.extras] -toml = ["tomli"] +toml = ["tomli ; python_full_version <= \"3.11.0a6\""] [[package]] name = "cryptography" @@ -468,6 +473,7 @@ version = "42.0.8" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e"}, {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d"}, @@ -522,6 +528,7 @@ version = "1.8.0" description = "An implementation of the Debug Adapter Protocol for Python" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "debugpy-1.8.0-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:7fb95ca78f7ac43393cd0e0f2b6deda438ec7c5e47fa5d38553340897d2fbdfb"}, {file = "debugpy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef9ab7df0b9a42ed9c878afd3eaaff471fce3fa73df96022e1f5c9f8f8c87ada"}, @@ -549,31 +556,19 @@ version = "5.1.1" description = "Decorators for Humans" optional = false python-versions = ">=3.5" +groups = ["main"] files = [ {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, ] -[[package]] -name = "exceptiongroup" -version = "1.2.0" -description = "Backport of PEP 654 (exception groups)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, - {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, -] - -[package.extras] -test 
= ["pytest (>=6)"] - [[package]] name = "execnet" version = "2.1.1" description = "execnet: rapid multi-Python deployment" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"}, {file = "execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"}, @@ -588,13 +583,14 @@ version = "2.0.1" description = "Get the currently executing AST node of a frame, and other information" optional = false python-versions = ">=3.5" +groups = ["main"] files = [ {file = "executing-2.0.1-py2.py3-none-any.whl", hash = "sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc"}, {file = "executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147"}, ] [package.extras] -tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] +tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich ; python_version >= \"3.11\""] [[package]] name = "flake8" @@ -602,6 +598,7 @@ version = "7.1.1" description = "the modular source code checker: pep8 pyflakes and co" optional = false python-versions = ">=3.8.1" +groups = ["dev"] files = [ {file = "flake8-7.1.1-py2.py3-none-any.whl", hash = "sha256:597477df7860daa5aa0fdd84bf5208a043ab96b8e96ab708770ae0364dd03213"}, {file = "flake8-7.1.1.tar.gz", hash = "sha256:049d058491e228e03e67b390f311bbf88fce2dbaa8fa673e7aea87b7198b8d38"}, @@ -618,6 +615,7 @@ version = "3.8.1" description = "Plugin for pytest that automatically reruns flaky tests." 
optional = false python-versions = ">=3.5" +groups = ["dev"] files = [ {file = "flaky-3.8.1-py2.py3-none-any.whl", hash = "sha256:194ccf4f0d3a22b2de7130f4b62e45e977ac1b5ccad74d4d48f3005dcc38815e"}, {file = "flaky-3.8.1.tar.gz", hash = "sha256:47204a81ec905f3d5acfbd61daeabcada8f9d4031616d9bcb0618461729699f5"}, @@ -629,6 +627,7 @@ version = "1.4.1" description = "A list-like structure which implements collections.abc.MutableSequence" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, @@ -715,6 +714,7 @@ version = "0.20.1" description = "Simple Python interface for Graphviz" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "graphviz-0.20.1-py3-none-any.whl", hash = "sha256:587c58a223b51611c0cf461132da386edd896a029524ca61a1462b880bf97977"}, {file = "graphviz-0.20.1.zip", hash = "sha256:8c58f14adaa3b947daf26c19bc1e98c4e0702cdc31cf99153e6f06904d492bf8"}, @@ -727,13 +727,14 @@ test = ["coverage", "mock (>=4)", "pytest (>=7)", "pytest-cov", "pytest-mock (>= [[package]] name = "hathorlib" -version = "0.6.1" +version = "0.11.0" description = "Hathor Network base objects library" optional = false python-versions = "<4,>=3.9" +groups = ["main"] files = [ - {file = "hathorlib-0.6.1-py3-none-any.whl", hash = "sha256:d5c004379bf46e334161c9b9566afb5b52ab73f1ec9b037567b50ca20083531d"}, - {file = "hathorlib-0.6.1.tar.gz", hash = "sha256:a0c6be59bfd759598d15d358f77b903c3feb3eecb3a6f8249dd593063aa49ac1"}, + {file = "hathorlib-0.11.0-py3-none-any.whl", hash = "sha256:2ff1c62bf34daadb562b91079eed36c00d25da68e2350c95f3aedd4fe990a17c"}, + {file = "hathorlib-0.11.0.tar.gz", hash = 
"sha256:b276b52bb651d2c2e575bb2cbff9195dbfde26ef7b8e2bc4944f69900bc31b6a"}, ] [package.dependencies] @@ -750,6 +751,7 @@ version = "21.0.0" description = "A featureful, immutable, and correct URL for Python." optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["main"] files = [ {file = "hyperlink-21.0.0-py2.py3-none-any.whl", hash = "sha256:e6b14c37ecb73e89c77d78cdb4c2cc8f3fb59a885c5b3f819ff4ed80f25af1b4"}, {file = "hyperlink-21.0.0.tar.gz", hash = "sha256:427af957daa58bc909471c6c40f74c5450fa123dd093fc53efd2e91d2705a56b"}, @@ -764,6 +766,7 @@ version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.5" +groups = ["main"] files = [ {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, @@ -775,6 +778,7 @@ version = "24.7.2" description = "A small library that versions your Python projects." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "incremental-24.7.2-py3-none-any.whl", hash = "sha256:8cb2c3431530bec48ad70513931a760f446ad6c25e8333ca5d95e24b0ed7b8fe"}, {file = "incremental-24.7.2.tar.gz", hash = "sha256:fb4f1d47ee60efe87d4f6f0ebb5f70b9760db2b2574c59c8e8912be4ebd464c9"}, @@ -782,7 +786,6 @@ files = [ [package.dependencies] setuptools = ">=61.0" -tomli = {version = "*", markers = "python_version < \"3.11\""} [package.extras] scripts = ["click (>=6.0)"] @@ -793,6 +796,7 @@ version = "2.0.0" description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, @@ -804,6 +808,7 @@ version = "3.1.0" description = "Editable interval tree data structure for Python 2 and 3" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "intervaltree-3.1.0.tar.gz", hash = "sha256:902b1b88936918f9b2a19e0e5eb7ccb430ae45cde4f39ea4b36932920d33952d"}, ] @@ -817,6 +822,7 @@ version = "6.28.0" description = "IPython Kernel for Jupyter" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "ipykernel-6.28.0-py3-none-any.whl", hash = "sha256:c6e9a9c63a7f4095c0a22a79f765f079f9ec7be4f2430a898ddea889e8665661"}, {file = "ipykernel-6.28.0.tar.gz", hash = "sha256:69c11403d26de69df02225916f916b37ea4b9af417da0a8c827f84328d88e5f3"}, @@ -850,6 +856,7 @@ version = "8.7.0" description = "IPython: Productive Interactive Computing" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "ipython-8.7.0-py3-none-any.whl", hash = "sha256:352042ddcb019f7c04e48171b4dd78e4c4bb67bf97030d170e154aac42b656d9"}, {file = "ipython-8.7.0.tar.gz", hash = 
"sha256:882899fe78d5417a0aa07f995db298fa28b58faeba2112d2e3a4c95fe14bb738"}, @@ -889,6 +896,7 @@ version = "5.13.2" description = "A Python utility / library to sort Python imports." optional = false python-versions = ">=3.8.0" +groups = ["dev"] files = [ {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, @@ -906,6 +914,7 @@ version = "0.19.1" description = "An autocompletion tool for Python that can be used for text editors." optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"}, {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"}, @@ -925,6 +934,7 @@ version = "8.6.0" description = "Jupyter protocol implementation and client libraries" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "jupyter_client-8.6.0-py3-none-any.whl", hash = "sha256:909c474dbe62582ae62b758bca86d6518c85234bdee2d908c778db6d72f39d99"}, {file = "jupyter_client-8.6.0.tar.gz", hash = "sha256:0642244bb83b4764ae60d07e010e15f0e2d275ec4e918a8f7b80fbbef3ca60c7"}, @@ -939,7 +949,7 @@ traitlets = ">=5.3" [package.extras] docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] -test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] +test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko ; sys_platform == \"win32\"", "pre-commit", "pytest", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] [[package]] name = "jupyter-core" @@ -947,6 +957,7 @@ version = "5.7.1" description = 
"Jupyter core package. A base package on which Jupyter projects rely." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "jupyter_core-5.7.1-py3-none-any.whl", hash = "sha256:c65c82126453a723a2804aa52409930434598fd9d35091d63dfb919d2b765bb7"}, {file = "jupyter_core-5.7.1.tar.gz", hash = "sha256:de61a9d7fc71240f688b2fb5ab659fbb56979458dc66a71decd098e03c79e218"}, @@ -967,6 +978,7 @@ version = "0.1.6" description = "Inline Matplotlib backend for Jupyter" optional = false python-versions = ">=3.5" +groups = ["main"] files = [ {file = "matplotlib-inline-0.1.6.tar.gz", hash = "sha256:f887e5f10ba98e8d2b150ddcf4702c1e5f8b3a20005eb0f74bfdbd360ee6f304"}, {file = "matplotlib_inline-0.1.6-py3-none-any.whl", hash = "sha256:f1f41aab5328aa5aaea9b16d083b128102f8712542f819fe7e6a420ff581b311"}, @@ -981,6 +993,7 @@ version = "0.7.0" description = "McCabe checker, plugin for flake8" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, @@ -992,6 +1005,7 @@ version = "0.20" description = "Implementation of Bitcoin BIP-0039" optional = false python-versions = ">=3.5" +groups = ["main"] files = [ {file = "mnemonic-0.20-py3-none-any.whl", hash = "sha256:acd2168872d0379e7a10873bb3e12bf6c91b35de758135c4fbd1015ef18fafc5"}, {file = "mnemonic-0.20.tar.gz", hash = "sha256:7c6fb5639d779388027a77944680aee4870f0fcd09b1e42a5525ee2ce4c625f6"}, @@ -1003,6 +1017,7 @@ version = "6.0.4" description = "multidict implementation" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, @@ -1086,6 +1101,8 @@ version = "1.10.1" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" +groups = ["dev"] +markers = "implementation_name == \"cpython\"" files = [ {file = "mypy-1.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e36f229acfe250dc660790840916eb49726c928e8ce10fbdf90715090fe4ae02"}, {file = "mypy-1.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:51a46974340baaa4145363b9e051812a2446cf583dfaeba124af966fa44593f7"}, @@ -1118,7 +1135,6 @@ files = [ [package.dependencies] mypy-extensions = ">=1.0.0" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} typing-extensions = ">=4.1.0" [package.extras] @@ -1133,6 +1149,8 @@ version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." optional = false python-versions = ">=3.5" +groups = ["dev"] +markers = "implementation_name == \"cpython\"" files = [ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, @@ -1144,6 +1162,8 @@ version = "1.0.5" description = "Plugin for mypy to support zope interfaces" optional = false python-versions = "*" +groups = ["dev"] +markers = "implementation_name == \"cpython\"" files = [ {file = "mypy_zope-1.0.5.tar.gz", hash = "sha256:2440406d49c0e1199c1cd819c92a2c4957de65579c6abc8a081c927f4bdc8d49"}, ] @@ -1162,6 +1182,7 @@ version = "1.5.8" description = "Patch asyncio to allow nested event loops" optional = false python-versions = ">=3.5" +groups = ["main"] files = [ {file = "nest_asyncio-1.5.8-py3-none-any.whl", hash = "sha256:accda7a339a70599cb08f9dd09a67e0c2ef8d8d6f4c07f96ab203f2ae254e48d"}, {file = "nest_asyncio-1.5.8.tar.gz", hash = 
"sha256:25aa2ca0d2a5b5531956b9e273b45cf664cae2b145101d73b86b199978d48fdb"}, @@ -1173,6 +1194,7 @@ version = "23.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" +groups = ["main", "dev"] files = [ {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, @@ -1184,6 +1206,7 @@ version = "0.8.3" description = "A Python Parser" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, @@ -1199,6 +1222,7 @@ version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, @@ -1210,6 +1234,7 @@ version = "4.8.0" description = "Pexpect allows easy control of interactive console applications." 
optional = false python-versions = "*" +groups = ["main"] files = [ {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"}, @@ -1224,6 +1249,7 @@ version = "0.7.5" description = "Tiny 'shelve'-like database with concurrency support" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, @@ -1235,6 +1261,7 @@ version = "4.1.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, @@ -1250,6 +1277,7 @@ version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, @@ -1265,6 +1293,7 @@ version = "0.15.0" description = "Python client for the Prometheus monitoring system." 
optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "prometheus_client-0.15.0-py3-none-any.whl", hash = "sha256:db7c05cbd13a0f79975592d112320f2605a325969b270a94b71dcabc47b931d2"}, {file = "prometheus_client-0.15.0.tar.gz", hash = "sha256:be26aa452490cfcf6da953f9436e95a9f2b4d578ca80094b4458930e5f584ab1"}, @@ -1279,6 +1308,7 @@ version = "3.0.43" description = "Library for building powerful interactive command lines in Python" optional = false python-versions = ">=3.7.0" +groups = ["main"] files = [ {file = "prompt_toolkit-3.0.43-py3-none-any.whl", hash = "sha256:a11a29cb3bf0a28a387fe5122cdb649816a957cd9261dcedf8c9f1fef33eacf6"}, {file = "prompt_toolkit-3.0.43.tar.gz", hash = "sha256:3527b7af26106cbc65a040bcc84839a3566ec1b051bb0bfe953631e704b0ff7d"}, @@ -1293,6 +1323,7 @@ version = "5.9.7" description = "Cross-platform lib for process and system monitoring in Python." optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +groups = ["main"] files = [ {file = "psutil-5.9.7-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:0bd41bf2d1463dfa535942b2a8f0e958acf6607ac0be52265ab31f7923bcd5e6"}, {file = "psutil-5.9.7-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:5794944462509e49d4d458f4dbfb92c47539e7d8d15c796f141f474010084056"}, @@ -1313,7 +1344,7 @@ files = [ ] [package.extras] -test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] +test = ["enum34 ; python_version <= \"3.4\"", "ipaddress ; python_version < \"3.0\"", "mock ; python_version < \"3.0\"", "pywin32 ; sys_platform == \"win32\"", "wmi ; sys_platform == \"win32\""] [[package]] name = "ptyprocess" @@ -1321,6 +1352,7 @@ version = "0.7.0" description = "Run a subprocess in a pseudo terminal" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, {file = "ptyprocess-0.7.0.tar.gz", hash = 
"sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, @@ -1332,6 +1364,7 @@ version = "0.2.2" description = "Safely evaluate AST nodes without side effects" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, @@ -1346,6 +1379,7 @@ version = "0.5.1" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +groups = ["main"] files = [ {file = "pyasn1-0.5.1-py2.py3-none-any.whl", hash = "sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58"}, {file = "pyasn1-0.5.1.tar.gz", hash = "sha256:6d391a96e59b23130a5cfa74d6fd7f388dbbe26cc8f1edf39fdddf08d9d6676c"}, @@ -1357,6 +1391,7 @@ version = "0.3.0" description = "A collection of ASN.1-based protocols modules" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +groups = ["main"] files = [ {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"}, {file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"}, @@ -1371,6 +1406,7 @@ version = "2.12.1" description = "Python style guide checker" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pycodestyle-2.12.1-py2.py3-none-any.whl", hash = "sha256:46f0fb92069a7c28ab7bb558f05bfc0110dac69a0cd23c61ea0040283a9d78b3"}, {file = "pycodestyle-2.12.1.tar.gz", hash = "sha256:6838eae08bbce4f6accd5d5572075c63626a15ee3e6f842df996bf62f6d73521"}, @@ -1382,6 +1418,7 @@ version = "0.92.20230326" description = "Utilities for Bitcoin and altcoin addresses and transaction 
manipulation." optional = false python-versions = "*" +groups = ["main"] files = [ {file = "pycoin-0.92.20230326.tar.gz", hash = "sha256:0d85f0013447c356b2f6cc0bb903ad07ee4b72805ee13b40296cd0831112c0df"}, ] @@ -1392,6 +1429,8 @@ version = "2.21" description = "C parser in Python" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["main"] +markers = "platform_python_implementation != \"PyPy\" or implementation_name == \"pypy\"" files = [ {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, @@ -1403,6 +1442,7 @@ version = "1.10.17" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "pydantic-1.10.17-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0fa51175313cc30097660b10eec8ca55ed08bfa07acbfe02f7a42f6c242e9a4b"}, {file = "pydantic-1.10.17-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7e8988bb16988890c985bd2093df9dd731bfb9d5e0860db054c23034fab8f7a"}, @@ -1462,6 +1502,7 @@ version = "3.2.0" description = "passive checker of Python programs" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"}, {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, @@ -1473,13 +1514,14 @@ version = "2.17.2" description = "Pygments is a syntax highlighting package written in Python." 
optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, ] [package.extras] -plugins = ["importlib-metadata"] +plugins = ["importlib-metadata ; python_version < \"3.8\""] windows-terminal = ["colorama (>=0.4.6)"] [[package]] @@ -1488,6 +1530,7 @@ version = "24.2.1" description = "Python wrapper module around the OpenSSL library" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "pyOpenSSL-24.2.1-py3-none-any.whl", hash = "sha256:967d5719b12b243588573f39b0c677637145c7a1ffedcd495a487e58177fbb8d"}, {file = "pyopenssl-24.2.1.tar.gz", hash = "sha256:4247f0dbe3748d560dcbb2ff3ea01af0f9a1a001ef5f7c4c647956ed8cbf0e95"}, @@ -1506,6 +1549,7 @@ version = "8.3.2" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pytest-8.3.2-py3-none-any.whl", hash = "sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5"}, {file = "pytest-8.3.2.tar.gz", hash = "sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce"}, @@ -1513,11 +1557,9 @@ files = [ [package.dependencies] colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" pluggy = ">=1.5,<2" -tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] @@ -1528,6 +1570,7 @@ version = "5.0.0" description = "Pytest plugin for measuring coverage." 
optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"}, {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"}, @@ -1546,6 +1589,7 @@ version = "3.6.1" description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pytest_xdist-3.6.1-py3-none-any.whl", hash = "sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7"}, {file = "pytest_xdist-3.6.1.tar.gz", hash = "sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d"}, @@ -1566,6 +1610,7 @@ version = "2.8.2" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] files = [ {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, @@ -1580,6 +1625,7 @@ version = "0.1.1" description = "Opinionated healthcheck library" optional = false python-versions = ">=3.8.1,<4.0.0" +groups = ["main"] files = [ {file = "python_healthchecklib-0.1.1-py3-none-any.whl", hash = "sha256:51ad9e7e782145977bf322cbe2095198a8b61473b09d43e79018e47483840d15"}, {file = "python_healthchecklib-0.1.1.tar.gz", hash = "sha256:bac6cdd9ef5825f6deb0cbe5f6d97260f3f402e111fc7fe2146444bdb77fd892"}, @@ -1591,6 +1637,8 @@ version = "306" description = "Python for Window Extensions" optional = false python-versions = "*" +groups = ["main"] +markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\"" files = [ {file = "pywin32-306-cp310-cp310-win32.whl", hash = 
"sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, @@ -1614,6 +1662,7 @@ version = "6.0.1" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.6" +groups = ["main", "dev"] files = [ {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, @@ -1633,6 +1682,7 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -1673,6 +1723,7 @@ version = "25.1.2" description = "Python bindings for 0MQ" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "pyzmq-25.1.2-cp310-cp310-macosx_10_15_universal2.whl", hash = 
"sha256:e624c789359f1a16f83f35e2c705d07663ff2b4d4479bad35621178d8f0f6ea4"}, {file = "pyzmq-25.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:49151b0efece79f6a79d41a461d78535356136ee70084a1c22532fc6383f4ad0"}, @@ -1778,6 +1829,7 @@ version = "2.32.3" description = "Python HTTP for Humans." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, @@ -1795,16 +1847,14 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "rocksdb" -version = "0.9.2" +version = "0.9.3" description = "Python bindings for RocksDB" optional = false -python-versions = "*" +python-versions = ">=3.10" +groups = ["main"] files = [] develop = false -[package.dependencies] -setuptools = ">=25" - [package.extras] doc = ["sphinx", "sphinx_rtd_theme"] test = ["pytest"] @@ -1813,7 +1863,7 @@ test = ["pytest"] type = "git" url = "https://github.com/hathornetwork/python-rocksdb.git" reference = "HEAD" -resolved_reference = "72edcfbd22f4a3ca816f94096d3ec181da41031e" +resolved_reference = "1f0ce6a35472ad2e631335f159db9906ed2ebc86" [[package]] name = "sentry-sdk" @@ -1821,6 +1871,8 @@ version = "1.39.1" description = "Python client for Sentry (https://sentry.io)" optional = true python-versions = "*" +groups = ["main"] +markers = "extra == \"sentry\"" files = [ {file = "sentry-sdk-1.39.1.tar.gz", hash = "sha256:320a55cdf9da9097a0bead239c35b7e61f53660ef9878861824fd6d9b2eaf3b5"}, {file = "sentry_sdk-1.39.1-py2.py3-none-any.whl", hash = "sha256:81b5b9ffdd1a374e9eb0c053b5d2012155db9cbe76393a8585677b753bd5fdc1"}, @@ -1866,6 +1918,7 @@ version = "21.1.0" description = "Service identity verification for pyOpenSSL & cryptography." 
optional = false python-versions = "*" +groups = ["main"] files = [ {file = "service-identity-21.1.0.tar.gz", hash = "sha256:6e6c6086ca271dc11b033d17c3a8bea9f24ebff920c587da090afc9519419d34"}, {file = "service_identity-21.1.0-py2.py3-none-any.whl", hash = "sha256:f0b0caac3d40627c3c04d7a51b6e06721857a0e10a8775f2d1d7e72901b3a7db"}, @@ -1890,6 +1943,7 @@ version = "1.3.3" description = "A Python module to customize the process title" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "setproctitle-1.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:897a73208da48db41e687225f355ce993167079eda1260ba5e13c4e53be7f754"}, {file = "setproctitle-1.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8c331e91a14ba4076f88c29c777ad6b58639530ed5b24b5564b5ed2fd7a95452"}, @@ -1990,14 +2044,16 @@ version = "69.0.3" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, ] +markers = {dev = "implementation_name == \"cpython\""} [package.extras] docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", 
"pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7) ; platform_python_implementation != \"PyPy\"", "pytest-checkdocs (>=2.4)", "pytest-cov ; platform_python_implementation != \"PyPy\"", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1) ; platform_python_implementation != \"PyPy\"", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-ruff ; sys_platform != \"cygwin\"", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] [[package]] @@ -2006,6 +2062,7 @@ version = "1.16.0" description = "Python 2 and 3 compatibility utilities" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +groups = ["main"] files = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, @@ -2017,6 +2074,7 @@ version = "2.4.0" description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, @@ -2028,6 +2086,7 @@ version = "0.6.3" description = "Extract data from python stack frames 
and tracebacks for informative displays" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, @@ -2047,6 +2106,7 @@ version = "22.3.0" description = "Structured Logging for Python" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "structlog-22.3.0-py3-none-any.whl", hash = "sha256:b403f344f902b220648fa9f286a23c0cc5439a5844d271fec40562dbadbc70ad"}, {file = "structlog-22.3.0.tar.gz", hash = "sha256:e7509391f215e4afb88b1b80fa3ea074be57a5a17d794bd436a5c949da023333"}, @@ -2064,6 +2124,8 @@ version = "1.4.0" description = "Sentry integration for structlog" optional = true python-versions = ">=3.6,<4.0" +groups = ["main"] +markers = "extra == \"sentry\"" files = [ {file = "structlog-sentry-1.4.0.tar.gz", hash = "sha256:5fc6cfab71b858d71433e68cc5af79a396e72015003931507e340b3687ebb0a8"}, {file = "structlog_sentry-1.4.0-py3-none-any.whl", hash = "sha256:04627538e13bb0719a8806353279d40c1d1afb3eb2053817820754b9a08814a7"}, @@ -2072,23 +2134,13 @@ files = [ [package.dependencies] sentry-sdk = "*" -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] - [[package]] name = "tornado" version = "6.4" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
optional = false python-versions = ">= 3.8" +groups = ["main"] files = [ {file = "tornado-6.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:02ccefc7d8211e5a7f9e8bc3f9e5b0ad6262ba2fbb683a6443ecc804e5224ce0"}, {file = "tornado-6.4-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:27787de946a9cffd63ce5814c33f734c627a87072ec7eed71f7fc4417bb16263"}, @@ -2109,6 +2161,7 @@ version = "5.14.0" description = "Traitlets Python configuration system" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "traitlets-5.14.0-py3-none-any.whl", hash = "sha256:f14949d23829023013c47df20b4a76ccd1a85effb786dc060f34de7948361b33"}, {file = "traitlets-5.14.0.tar.gz", hash = "sha256:fcdaa8ac49c04dfa0ed3ee3384ef6dfdb5d6f3741502be247279407679296772"}, @@ -2124,6 +2177,7 @@ version = "24.7.0" description = "An asynchronous networking framework written in Python" optional = false python-versions = ">=3.8.0" +groups = ["main"] files = [ {file = "twisted-24.7.0-py3-none-any.whl", hash = "sha256:734832ef98108136e222b5230075b1079dad8a3fc5637319615619a7725b0c81"}, {file = "twisted-24.7.0.tar.gz", hash = "sha256:5a60147f044187a127ec7da96d170d49bcce50c6fd36f594e60f4587eff4d394"}, @@ -2139,19 +2193,19 @@ typing-extensions = ">=4.2.0" zope-interface = ">=5" [package.extras] -all-non-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)"] +all-non-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt 
(>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226) ; platform_system == \"Windows\"", "pywin32 (!=226) ; platform_system == \"Windows\"", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)"] conch = ["appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)"] dev = ["coverage (>=7.5,<8.0)", "cython-test-exception-raiser (>=1.0.2,<2)", "hypothesis (>=6.56)", "pydoctor (>=23.9.0,<23.10.0)", "pyflakes (>=2.2,<3.0)", "pyhamcrest (>=2)", "python-subunit (>=1.4,<2.0)", "sphinx (>=6,<7)", "sphinx-rtd-theme (>=1.3,<2.0)", "towncrier (>=23.6,<24.0)", "twistedchecker (>=0.7,<1.0)"] dev-release = ["pydoctor (>=23.9.0,<23.10.0)", "pydoctor (>=23.9.0,<23.10.0)", "sphinx (>=6,<7)", "sphinx (>=6,<7)", "sphinx-rtd-theme (>=1.3,<2.0)", "sphinx-rtd-theme (>=1.3,<2.0)", "towncrier (>=23.6,<24.0)", "towncrier (>=23.6,<24.0)"] -gtk-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pygobject", "pygobject", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)"] +gtk-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt 
(>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pygobject", "pygobject", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226) ; platform_system == \"Windows\"", "pywin32 (!=226) ; platform_system == \"Windows\"", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)"] http2 = ["h2 (>=3.0,<5.0)", "priority (>=1.1.0,<2.0)"] -macos-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyobjc-core", "pyobjc-core", "pyobjc-framework-cfnetwork", "pyobjc-framework-cfnetwork", "pyobjc-framework-cocoa", "pyobjc-framework-cocoa", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)"] -mypy = ["appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "coverage (>=7.5,<8.0)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "idna (>=2.4)", "mypy (>=1.8,<2.0)", "mypy-zope (>=1.0.3,<1.1.0)", "priority (>=1.1.0,<2.0)", "pydoctor (>=23.9.0,<23.10.0)", "pyflakes (>=2.2,<3.0)", "pyhamcrest (>=2)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "python-subunit (>=1.4,<2.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)", "sphinx (>=6,<7)", 
"sphinx-rtd-theme (>=1.3,<2.0)", "towncrier (>=23.6,<24.0)", "twistedchecker (>=0.7,<1.0)", "types-pyopenssl", "types-setuptools"] -osx-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyobjc-core", "pyobjc-core", "pyobjc-framework-cfnetwork", "pyobjc-framework-cfnetwork", "pyobjc-framework-cocoa", "pyobjc-framework-cocoa", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)"] -serial = ["pyserial (>=3.0)", "pywin32 (!=226)"] +macos-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyobjc-core", "pyobjc-core", "pyobjc-framework-cfnetwork", "pyobjc-framework-cfnetwork", "pyobjc-framework-cocoa", "pyobjc-framework-cocoa", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226) ; platform_system == \"Windows\"", "pywin32 (!=226) ; platform_system == \"Windows\"", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)"] +mypy = ["appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "coverage (>=7.5,<8.0)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "idna 
(>=2.4)", "mypy (>=1.8,<2.0)", "mypy-zope (>=1.0.3,<1.1.0)", "priority (>=1.1.0,<2.0)", "pydoctor (>=23.9.0,<23.10.0)", "pyflakes (>=2.2,<3.0)", "pyhamcrest (>=2)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "python-subunit (>=1.4,<2.0)", "pywin32 (!=226) ; platform_system == \"Windows\"", "service-identity (>=18.1.0)", "sphinx (>=6,<7)", "sphinx-rtd-theme (>=1.3,<2.0)", "towncrier (>=23.6,<24.0)", "twistedchecker (>=0.7,<1.0)", "types-pyopenssl", "types-setuptools"] +osx-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyobjc-core", "pyobjc-core", "pyobjc-framework-cfnetwork", "pyobjc-framework-cfnetwork", "pyobjc-framework-cocoa", "pyobjc-framework-cocoa", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226) ; platform_system == \"Windows\"", "pywin32 (!=226) ; platform_system == \"Windows\"", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)"] +serial = ["pyserial (>=3.0)", "pywin32 (!=226) ; platform_system == \"Windows\""] test = ["cython-test-exception-raiser (>=1.0.2,<2)", "hypothesis (>=6.56)", "pyhamcrest (>=2)"] tls = ["idna (>=2.4)", "pyopenssl (>=21.0.0)", "service-identity (>=18.1.0)"] -windows-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pyhamcrest 
(>=2)", "pyhamcrest (>=2)", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)", "twisted-iocpsupport (>=1.0.2)", "twisted-iocpsupport (>=1.0.2)"] +windows-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "pywin32 (!=226) ; platform_system == \"Windows\"", "pywin32 (!=226) ; platform_system == \"Windows\"", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)", "twisted-iocpsupport (>=1.0.2)", "twisted-iocpsupport (>=1.0.2)"] [[package]] name = "txaio" @@ -2159,6 +2213,7 @@ version = "23.1.1" description = "Compatibility API between asyncio/Twisted/Trollius" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "txaio-23.1.1-py2.py3-none-any.whl", hash = "sha256:aaea42f8aad50e0ecfb976130ada140797e9dcb85fad2cf72b0f37f8cefcb490"}, {file = "txaio-23.1.1.tar.gz", hash = "sha256:f9a9216e976e5e3246dfd112ad7ad55ca915606b60b84a757ac769bd404ff704"}, @@ -2175,6 +2230,7 @@ version = "3.3.23.2" description = "Typing stubs for cryptography" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "types-cryptography-3.3.23.2.tar.gz", hash = "sha256:09cc53f273dd4d8c29fa7ad11fefd9b734126d467960162397bc5e3e604dea75"}, {file = "types_cryptography-3.3.23.2-py3-none-any.whl", hash = 
"sha256:b965d548f148f8e87f353ccf2b7bd92719fdf6c845ff7cedf2abb393a0643e4f"}, @@ -2186,6 +2242,7 @@ version = "22.1.0.2" description = "Typing stubs for pyOpenSSL" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "types-pyOpenSSL-22.1.0.2.tar.gz", hash = "sha256:7a350e29e55bc3ee4571f996b4b1c18c4e4098947db45f7485b016eaa35b44bc"}, {file = "types_pyOpenSSL-22.1.0.2-py3-none-any.whl", hash = "sha256:54606a6afb203eb261e0fca9b7f75fa6c24d5ff71e13903c162ffb951c2c64c6"}, @@ -2200,6 +2257,7 @@ version = "6.0.12.9" description = "Typing stubs for PyYAML" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "types-PyYAML-6.0.12.9.tar.gz", hash = "sha256:c51b1bd6d99ddf0aa2884a7a328810ebf70a4262c292195d3f4f9a0005f9eeb6"}, {file = "types_PyYAML-6.0.12.9-py3-none-any.whl", hash = "sha256:5aed5aa66bd2d2e158f75dda22b059570ede988559f030cf294871d3b647e3e8"}, @@ -2211,6 +2269,7 @@ version = "2.28.11.4" description = "Typing stubs for requests" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "types-requests-2.28.11.4.tar.gz", hash = "sha256:d4f342b0df432262e9e326d17638eeae96a5881e78e7a6aae46d33870d73952e"}, {file = "types_requests-2.28.11.4-py3-none-any.whl", hash = "sha256:bdb1f9811e53d0642c8347b09137363eb25e1a516819e190da187c29595a1df3"}, @@ -2225,6 +2284,7 @@ version = "1.26.25.14" description = "Typing stubs for urllib3" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "types-urllib3-1.26.25.14.tar.gz", hash = "sha256:229b7f577c951b8c1b92c1bc2b2fdb0b49847bd2af6d1cc2a2e3dd340f3bda8f"}, {file = "types_urllib3-1.26.25.14-py3-none-any.whl", hash = "sha256:9683bbb7fb72e32bfe9d2be6e04875fbe1b3eeec3cbb4ea231435aa7fd6b4f0e"}, @@ -2236,6 +2296,7 @@ version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = 
"sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, @@ -2247,14 +2308,15 @@ version = "1.26.18" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +groups = ["main"] files = [ {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"}, {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"}, ] [package.extras] -brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +brotli = ["brotli (==1.0.9) ; os_name != \"nt\" and python_version < \"3\" and platform_python_implementation == \"CPython\"", "brotli (>=1.0.9) ; python_version >= \"3\" and platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; (os_name != \"nt\" or python_version >= \"3\") and platform_python_implementation != \"CPython\"", "brotlipy (>=0.6.0) ; os_name == \"nt\" and python_version < \"3\""] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress ; python_version == \"2.7\"", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] @@ -2263,6 +2325,7 @@ version = "0.2.12" description = "Measures the displayed width of unicode strings in a terminal" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "wcwidth-0.2.12-py2.py3-none-any.whl", hash = "sha256:f26ec43d96c8cbfed76a5075dac87680124fa84e0855195a6184da9c187f133c"}, {file = "wcwidth-0.2.12.tar.gz", hash = 
"sha256:f01c104efdf57971bcb756f054dd58ddec5204dd15fa31d6503ea57947d97c02"}, @@ -2274,6 +2337,7 @@ version = "1.35.1" description = "A linter for YAML files." optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "yamllint-1.35.1-py3-none-any.whl", hash = "sha256:2e16e504bb129ff515b37823b472750b36b6de07963bd74b307341ef5ad8bdc3"}, {file = "yamllint-1.35.1.tar.gz", hash = "sha256:7a003809f88324fd2c877734f2d575ee7881dd9043360657cc8049c809eba6cd"}, @@ -2292,6 +2356,7 @@ version = "1.9.4" description = "Yet another URL library" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, @@ -2395,6 +2460,8 @@ version = "5.0" description = "Very basic event publishing system" optional = false python-versions = ">=3.7" +groups = ["dev"] +markers = "implementation_name == \"cpython\"" files = [ {file = "zope.event-5.0-py3-none-any.whl", hash = "sha256:2832e95014f4db26c47a13fdaef84cef2f4df37e66b59d8f1f4a8f319a632c26"}, {file = "zope.event-5.0.tar.gz", hash = "sha256:bac440d8d9891b4068e2b5a2c5e2c9765a9df762944bda6955f96bb9b91e67cd"}, @@ -2413,6 +2480,7 @@ version = "6.1" description = "Interfaces for Python" optional = false python-versions = ">=3.7" +groups = ["main", "dev"] files = [ {file = "zope.interface-6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:43b576c34ef0c1f5a4981163b551a8781896f2a37f71b8655fd20b5af0386abb"}, {file = "zope.interface-6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:67be3ca75012c6e9b109860820a8b6c9a84bfb036fbd1076246b98e56951ca92"}, @@ -2451,6 +2519,7 @@ files = [ {file = "zope.interface-6.1-cp39-cp39-win_amd64.whl", hash = "sha256:a41f87bb93b8048fe866fa9e3d0c51e27fe55149035dcf5f43da4b56732c0a40"}, {file = 
"zope.interface-6.1.tar.gz", hash = "sha256:2fdc7ccbd6eb6b7df5353012fbed6c3c5d04ceaca0038f75e601060e95345309"}, ] +markers = {dev = "implementation_name == \"cpython\""} [package.dependencies] setuptools = "*" @@ -2466,6 +2535,8 @@ version = "7.0.1" description = "zope.interface extension for defining data schemas" optional = false python-versions = ">=3.7" +groups = ["dev"] +markers = "implementation_name == \"cpython\"" files = [ {file = "zope.schema-7.0.1-py3-none-any.whl", hash = "sha256:cf006c678793b00e0075ad54d55281c8785ea21e5bc1f5ec0584787719c2aab2"}, {file = "zope.schema-7.0.1.tar.gz", hash = "sha256:ead4dbcb03354d4e410c9a3b904451eb44d90254751b1cbdedf4a61aede9fbb9"}, @@ -2484,6 +2555,6 @@ test = ["zope.i18nmessageid", "zope.testing", "zope.testrunner"] sentry = ["sentry-sdk", "structlog-sentry"] [metadata] -lock-version = "2.0" -python-versions = ">=3.10,<4" -content-hash = "05a728b943ae8b639bbb369f400bb7ed5b6c0c5205abaf355194c7168b4798c7" +lock-version = "2.1" +python-versions = ">=3.11,<4" +content-hash = "11820c4aa99b3e99394bca09b415e867ad016c706c372a515aa031063db5998f" diff --git a/pyproject.toml b/pyproject.toml index 13079d46c..a4c1cf41b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,7 +14,7 @@ [tool.poetry] name = "hathor" -version = "0.63.1" +version = "0.64.0" description = "Hathor Network full-node" authors = ["Hathor Team "] license = "Apache-2.0" @@ -23,7 +23,6 @@ homepage = "https://hathor.network/" repository = "https://github.com/HathorNetwork/hathor-core/" # https://pypi.org/classifiers/ classifiers = [ - "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Operating System :: OS Independent", @@ -35,7 +34,7 @@ exclude = ["tests", "tests.*"] [tool.poetry.scripts] hathor-cli = 'hathor.cli.main:main' -[tool.poetry.dev-dependencies] +[tool.poetry.group.dev.dependencies] flake8 = "~7.1.1" isort = {version = "~5.13.2", extras = ["colors"]} mypy = {version = 
"^1.10.1", markers = "implementation_name == 'cpython'"} @@ -51,7 +50,7 @@ types-pyopenssl = "=22.1.0.2" types-pyyaml = "=6.0.12.9" [tool.poetry.dependencies] -python = ">=3.10,<4" +python = ">=3.11,<4" twisted = "~24.7.0" autobahn = "~24.4.2" base58 = "~2.1.1" @@ -75,7 +74,7 @@ idna = "~3.4" setproctitle = "^1.3.3" sentry-sdk = {version = "^1.5.11", optional = true} structlog-sentry = {version = "^1.4.0", optional = true} -hathorlib = "^0.6.1" +hathorlib = "^0.11.0" pydantic = "~1.10.17" pyyaml = "^6.0.1" typing-extensions = "~4.12.2" @@ -176,6 +175,7 @@ addopts = "-n auto" markers = [ "slow", ] +norecursedirs = ["tests/nanocontracts/test_blueprints"] [build-system] requires = ["poetry-core >= 1.3.2", "cython < 0.30"] diff --git a/tests/cli/test_cli_main.py b/tests/cli/test_cli_main.py index 3acf455df..185938635 100644 --- a/tests/cli/test_cli_main.py +++ b/tests/cli/test_cli_main.py @@ -23,3 +23,27 @@ def test_init(self): # 3 is the number of prints we have without any command self.assertTrue(len(output) >= 3) + + def test_help(self): + import sys + + # basically making sure importing works + cli = main.CliManager() + + # Help method only prints on the screen + # So just making sure it has no errors + f = StringIO() + with self.assertRaises(SystemExit) as cm: + with capture_logs(): + with redirect_stdout(f): + sys.argv = ['hathor-core', 'run_node', '--help'] + cli.execute_from_command_line() + + # Must exit with code 0 + self.assertEqual(cm.exception.args[0], 0) + + # Transforming prints str in array + output = f.getvalue().strip().splitlines() + + # The help output will normally contain at least 80 lines + self.assertGreaterEqual(len(output), 80) diff --git a/tests/cli/test_db_export.py b/tests/cli/test_db_export.py index c4bfbb027..89c767005 100644 --- a/tests/cli/test_db_export.py +++ b/tests/cli/test_db_export.py @@ -8,5 +8,5 @@ class TestDbExport(unittest.TestCase): def test_db_export(self): tmp_dir = self.mkdtemp() tmp_file = os.path.join(tmp_dir, 
'test_file') - db_export = DbExport(argv=['--memory-storage', '--export-file', tmp_file]) + db_export = DbExport(argv=['--temp-data', '--export-file', tmp_file]) assert db_export is not None diff --git a/tests/cli/test_db_import.py b/tests/cli/test_db_import.py index 8be9cfcaf..a7e4ece39 100644 --- a/tests/cli/test_db_import.py +++ b/tests/cli/test_db_import.py @@ -7,5 +7,5 @@ class TestDbImport(unittest.TestCase): def test_db_import(self): _, tmp_file = tempfile.mkstemp() - db_import = DbImport(argv=['--memory-storage', '--import-file', tmp_file]) + db_import = DbImport(argv=['--temp-data', '--import-file', tmp_file]) assert db_import is not None diff --git a/tests/cli/test_multisig_spend.py b/tests/cli/test_multisig_spend.py index 7b427ed78..ebbf61dd2 100644 --- a/tests/cli/test_multisig_spend.py +++ b/tests/cli/test_multisig_spend.py @@ -118,4 +118,4 @@ def test_spend_multisig(self): tx_raw = output[0].split(':')[1].strip() tx = Transaction.create_from_struct(bytes.fromhex(tx_raw)) - self.assertTrue(self.manager.propagate_tx(tx, False)) + self.assertTrue(self.manager.propagate_tx(tx)) diff --git a/tests/cli/test_quick_test.py b/tests/cli/test_quick_test.py index 9257d6f7b..1d3d866c4 100644 --- a/tests/cli/test_quick_test.py +++ b/tests/cli/test_quick_test.py @@ -11,7 +11,7 @@ def start_manager(self) -> None: def register_signal_handlers(self) -> None: pass - quick_test = CustomQuickTest(argv=['--memory-storage', '--no-wait']) + quick_test = CustomQuickTest(argv=['--temp-data', '--no-wait']) assert quick_test is not None self.clean_pending(required_to_quiesce=False) diff --git a/tests/cli/test_run_node.py b/tests/cli/test_run_node.py index 84d73d2ef..57a71e649 100644 --- a/tests/cli/test_run_node.py +++ b/tests/cli/test_run_node.py @@ -7,7 +7,7 @@ class RunNodeTest(unittest.TestCase): # In this case we just want to go through the code to see if it's okay - def test_memory_storage(self): + def test_temp_data(self): class CustomRunNode(RunNode): def 
start_manager(self) -> None: pass @@ -15,7 +15,7 @@ def start_manager(self) -> None: def register_signal_handlers(self) -> None: pass - run_node = CustomRunNode(argv=['--memory-storage']) + run_node = CustomRunNode(argv=['--temp-data']) self.assertTrue(run_node is not None) @patch('twisted.internet.reactor.listenTCP') @@ -28,7 +28,7 @@ def start_manager(self) -> None: def register_signal_handlers(self) -> None: pass - run_node = CustomRunNode(argv=['--memory-storage', '--status', '1234']) + run_node = CustomRunNode(argv=['--temp-data', '--status', '1234']) self.assertTrue(run_node is not None) mock_listenTCP.assert_called_with(1234, ANY) @@ -43,7 +43,7 @@ def start_manager(self) -> None: def register_signal_handlers(self) -> None: pass - run_node = CustomRunNode(argv=['--memory-storage', '--x-enable-ipv6', '--status', '1234']) + run_node = CustomRunNode(argv=['--temp-data', '--x-enable-ipv6', '--status', '1234']) self.assertTrue(run_node is not None) mock_listenTCP.assert_called_with(1234, ANY, interface='::0') @@ -59,4 +59,4 @@ def register_signal_handlers(self) -> None: # Should call system exit with self.assertRaises(SystemExit): - CustomRunNode(argv=['--memory-storage', '--x-disable-ipv4', '--status', '1234']) + CustomRunNode(argv=['--temp-data', '--x-disable-ipv4', '--status', '1234']) diff --git a/tests/cli/test_shell.py b/tests/cli/test_shell.py index d85d4cfa5..b446bcf00 100644 --- a/tests/cli/test_shell.py +++ b/tests/cli/test_shell.py @@ -7,8 +7,8 @@ class ShellTest(unittest.TestCase): # In this case we just want to go through the code to see if it's okay - def test_shell_execution_memory_storage(self): - shell = Shell(argv=['--memory-storage', '--', '--extra-arg']) + def test_shell_execution_temp_data(self): + shell = Shell(argv=['--temp-data', '--', '--extra-arg']) self.assertTrue(shell is not None) def test_shell_execution_default_storage(self): diff --git a/tests/cli/test_sysctl_init.py b/tests/cli/test_sysctl_init.py index 2063d7f76..d7a3e5a03 100644 
--- a/tests/cli/test_sysctl_init.py +++ b/tests/cli/test_sysctl_init.py @@ -132,7 +132,7 @@ def register_signal_handlers(self) -> None: run_node = CustomRunNode(argv=[ '--sysctl', 'tcp:8181', '--sysctl-init-file', sysctl_init_file_path, # relative to src/hathor - '--memory-storage', + '--temp-data', ]) self.assertTrue(run_node is not None) conn = run_node.manager.connections @@ -192,7 +192,7 @@ def register_signal_handlers(self) -> None: run_node = CustomRunNode(argv=[ '--sysctl', 'tcp:8181', '--sysctl-init-file', sysctl_init_file_path, # relative to src/hathor - '--memory-storage', + '--temp-data', ]) self.assertTrue(run_node is not None) conn = run_node.manager.connections diff --git a/tests/consensus/test_consensus.py b/tests/consensus/test_consensus.py index 13ada2786..0da029cdb 100644 --- a/tests/consensus/test_consensus.py +++ b/tests/consensus/test_consensus.py @@ -2,7 +2,6 @@ from hathor.execution_manager import ExecutionManager from hathor.simulator.utils import add_new_block, add_new_blocks, gen_new_tx -from hathor.transaction.storage import TransactionMemoryStorage from hathor.util import not_none from tests import unittest from tests.utils import add_blocks_unlock_reward, add_new_double_spending, add_new_transactions @@ -11,7 +10,7 @@ class ConsensusTestCase(unittest.TestCase): def setUp(self) -> None: super().setUp() - self.tx_storage = TransactionMemoryStorage(settings=self._settings) + self.tx_storage = self.create_tx_storage() self.genesis = self.tx_storage.get_all_genesis() self.genesis_blocks = [tx for tx in self.genesis if tx.is_block] self.genesis_txs = [tx for tx in self.genesis if not tx.is_block] @@ -34,7 +33,7 @@ class MyError(Exception): manager.vertex_handler._execution_manager = execution_manager_mock manager.consensus_algorithm.unsafe_update = MagicMock(side_effect=MyError) - manager.propagate_tx(tx, fails_silently=False) + manager.propagate_tx(tx) execution_manager_mock.crash_and_exit.assert_called_once_with( reason=f"on_new_vertex() 
failed for tx {tx.hash_hex}" @@ -79,7 +78,7 @@ def test_revert_block_high_weight(self) -> None: b0 = tb0.generate_mining_block(manager.rng, storage=manager.tx_storage) b0.weight = 10 manager.cpu_mining_service.resolve(b0) - manager.propagate_tx(b0, fails_silently=False) + manager.propagate_tx(b0) b1 = add_new_block(manager, advance_clock=15) b2 = add_new_block(manager, advance_clock=15) @@ -140,7 +139,7 @@ def test_dont_revert_block_low_weight(self) -> None: b0 = manager.generate_mining_block() b0.parents = [blocks[-1].hash, conflicting_tx.hash, conflicting_tx.parents[0]] manager.cpu_mining_service.resolve(b0) - manager.propagate_tx(b0, fails_silently=False) + manager.propagate_tx(b0) b1 = add_new_block(manager, advance_clock=15) b2 = add_new_block(manager, advance_clock=15) @@ -195,7 +194,7 @@ def test_dont_revert_block_high_weight_transaction_verify_other(self) -> None: b0 = tb0.generate_mining_block(manager.rng, storage=manager.tx_storage) b0.weight = 10 manager.cpu_mining_service.resolve(b0) - manager.propagate_tx(b0, fails_silently=False) + manager.propagate_tx(b0) b1 = add_new_block(manager, advance_clock=15) b2 = add_new_block(manager, advance_clock=15) @@ -248,7 +247,7 @@ def test_dont_revert_block_high_weight_verify_both(self) -> None: b0.parents = [b0.parents[0], conflicting_tx.hash, conflicting_tx.parents[0]] b0.weight = 10 manager.cpu_mining_service.resolve(b0) - manager.propagate_tx(b0, fails_silently=False) + manager.propagate_tx(b0) b1 = add_new_block(manager, advance_clock=15) b2 = add_new_block(manager, advance_clock=15) diff --git a/tests/consensus/test_consensus2.py b/tests/consensus/test_consensus2.py index 82fa9be60..c4318b776 100644 --- a/tests/consensus/test_consensus2.py +++ b/tests/consensus/test_consensus2.py @@ -80,7 +80,7 @@ def test_two_conflicts_intertwined_once(self) -> None: initial = gen_new_tx(manager1, address, value) initial.weight = 25 initial.update_hash() - manager1.propagate_tx(initial, fails_silently=False) + 
manager1.propagate_tx(initial) self.graphviz.labels[initial.hash] = 'initial' x = initial @@ -116,7 +116,7 @@ def test_two_conflicts_intertwined_multiple_times(self) -> None: initial = gen_new_tx(manager1, address, value) initial.weight = 25 initial.update_hash() - manager1.propagate_tx(initial, fails_silently=False) + manager1.propagate_tx(initial) self.graphviz.labels[not_none(initial.hash)] = 'initial' x = initial diff --git a/tests/consensus/test_consensus5.py b/tests/consensus/test_consensus5.py index fa40dfd29..d119c6f59 100644 --- a/tests/consensus/test_consensus5.py +++ b/tests/consensus/test_consensus5.py @@ -62,7 +62,7 @@ def test_conflict_with_parent_tx(self) -> None: b2.nonce = self.rng.getrandbits(32) b2.update_hash() self.graphviz.labels[b2.hash] = 'b2' - self.assertTrue(manager1.propagate_tx(b2, fails_silently=False)) + self.assertTrue(manager1.propagate_tx(b2)) self.simulator.run(10) self.assertIsNone(txA1.get_metadata().voided_by) diff --git a/tests/consensus/test_first_block.py b/tests/consensus/test_first_block.py index 1b63e2555..4b291989d 100644 --- a/tests/consensus/test_first_block.py +++ b/tests/consensus/test_first_block.py @@ -1,4 +1,6 @@ +from hathor.transaction import Block, Transaction from tests import unittest +from tests.dag_builder.builder import TestDAGBuilder class FirstBlockTestCase(unittest.TestCase): @@ -15,7 +17,7 @@ def setUp(self) -> None: .set_cpu_mining_service(cpu_mining_service) self.manager = self.create_peer_from_builder(builder) - self.dag_builder = self.get_dag_builder(self.manager) + self.dag_builder = TestDAGBuilder.from_manager(self.manager) def test_first_block(self) -> None: artifacts = self.dag_builder.build_from_str(""" @@ -40,21 +42,11 @@ def test_first_block(self) -> None: b33 --> tx50 """) - for node, vertex in artifacts.list: - self.manager.on_new_tx(vertex, fails_silently=False) - - b31 = artifacts.by_name['b31'].vertex - b32 = artifacts.by_name['b32'].vertex - b33 = artifacts.by_name['b33'].vertex - - 
tx10 = artifacts.by_name['tx10'].vertex - tx20 = artifacts.by_name['tx20'].vertex - tx30 = artifacts.by_name['tx30'].vertex - tx40 = artifacts.by_name['tx40'].vertex - tx41 = artifacts.by_name['tx41'].vertex - tx42 = artifacts.by_name['tx42'].vertex - tx43 = artifacts.by_name['tx43'].vertex - tx50 = artifacts.by_name['tx50'].vertex + artifacts.propagate_with(self.manager) + + b31, b32, b33 = artifacts.get_typed_vertices(['b31', 'b32', 'b33'], Block) + txs = ['tx10', 'tx20', 'tx30', 'tx40', 'tx41', 'tx42', 'tx43', 'tx50'] + tx10, tx20, tx30, tx40, tx41, tx42, tx43, tx50 = artifacts.get_typed_vertices(txs, Transaction) self.assertEqual(tx10.get_metadata().first_block, b31.hash) diff --git a/tests/consensus/test_soft_voided.py b/tests/consensus/test_soft_voided.py index a1bc57a19..74b3650e9 100644 --- a/tests/consensus/test_soft_voided.py +++ b/tests/consensus/test_soft_voided.py @@ -89,7 +89,7 @@ def _run_test( txC.timestamp = max(txC.timestamp, txA.timestamp + 1) txC.weight = 25 txC.update_hash() - self.assertTrue(manager2.propagate_tx(txC, fails_silently=False)) + self.assertTrue(manager2.propagate_tx(txC)) metaC = txC.get_metadata() self.assertIsNone(metaC.voided_by) graphviz.labels[txC.hash] = 'txC' @@ -99,7 +99,7 @@ def _run_test( blk1.parents[1] = txA.hash blk1.nonce = self.rng.getrandbits(32) blk1.update_hash() - self.assertTrue(manager2.propagate_tx(blk1, fails_silently=False)) + self.assertTrue(manager2.propagate_tx(blk1)) blk1meta = blk1.get_metadata() self.assertIsNone(blk1meta.voided_by) graphviz.labels[blk1.hash] = 'b1' @@ -110,7 +110,7 @@ def _run_test( blk2.parents[1] = txD1.hash blk2.nonce = self.rng.getrandbits(32) blk2.update_hash() - self.assertTrue(manager2.propagate_tx(blk2, fails_silently=False)) + self.assertTrue(manager2.propagate_tx(blk2)) blk2meta = blk2.get_metadata() self.assertIsNone(blk2meta.voided_by) graphviz.labels[blk2.hash] = 'b2' @@ -120,7 +120,7 @@ def _run_test( blk3.parents[1] = txD2.hash blk3.nonce = self.rng.getrandbits(32) 
blk3.update_hash() - self.assertTrue(manager2.propagate_tx(blk3, fails_silently=False)) + self.assertTrue(manager2.propagate_tx(blk3)) blk3meta = blk3.get_metadata() self.assertIsNone(blk3meta.voided_by) graphviz.labels[blk3.hash] = 'b3' diff --git a/tests/consensus/test_soft_voided2.py b/tests/consensus/test_soft_voided2.py index 70881c627..852af9c61 100644 --- a/tests/consensus/test_soft_voided2.py +++ b/tests/consensus/test_soft_voided2.py @@ -99,7 +99,7 @@ def gen_block(self, manager1: HathorManager, tx: Transaction, parent_block: Bloc block.timestamp = max(block.timestamp, tx.timestamp + 1) block.nonce = self.rng.getrandbits(32) block.update_hash() - self.assertTrue(manager1.propagate_tx(block, fails_silently=False)) + self.assertTrue(manager1.propagate_tx(block)) return block def _run_test(self, simulator: Simulator, soft_voided_tx_ids: set[VertexId]) -> Iterator[None]: @@ -124,7 +124,7 @@ def _run_test(self, simulator: Simulator, soft_voided_tx_ids: set[VertexId]) -> initial = gen_new_tx(manager1, address, value) initial.weight = 25 initial.update_hash() - manager1.propagate_tx(initial, fails_silently=False) + manager1.propagate_tx(initial) self.graphviz.labels[initial.hash] = 'initial' x = initial diff --git a/tests/consensus/test_soft_voided3.py b/tests/consensus/test_soft_voided3.py index bdb8c0a7c..38f803ebd 100644 --- a/tests/consensus/test_soft_voided3.py +++ b/tests/consensus/test_soft_voided3.py @@ -84,7 +84,7 @@ def _run_test( blk1.timestamp = txD1.timestamp + 1 blk1.nonce = self.rng.getrandbits(32) blk1.update_hash() - self.assertTrue(manager2.propagate_tx(blk1, fails_silently=False)) + self.assertTrue(manager2.propagate_tx(blk1)) blk1meta = blk1.get_metadata() self.assertIsNone(blk1meta.voided_by) graphviz.labels[blk1.hash] = 'blk1' @@ -97,7 +97,7 @@ def _run_test( txC.parents[1] = txD1.hash txC.weight = 25 txC.update_hash() - manager2.propagate_tx(txC, fails_silently=False) + manager2.propagate_tx(txC) metaC = txC.get_metadata() 
self.assertIsNone(metaC.voided_by) graphviz.labels[txC.hash] = 'txC' @@ -105,7 +105,7 @@ def _run_test( txD2 = gen_custom_tx(manager2, [(txB, 0)]) txD2.timestamp = txD1.timestamp + 2 txD2.update_hash() - manager2.propagate_tx(txD2, fails_silently=False) + manager2.propagate_tx(txD2) graphviz.labels[txD2.hash] = 'txD2' blk1meta = blk1.get_metadata() diff --git a/tests/consensus/test_soft_voided4.py b/tests/consensus/test_soft_voided4.py index f60dab477..7bb510f75 100644 --- a/tests/consensus/test_soft_voided4.py +++ b/tests/consensus/test_soft_voided4.py @@ -79,7 +79,7 @@ def _run_test( txC.parents = tx_base.parents txC.update_hash() self.graphviz.labels[txC.hash] = 'txC' - self.assertTrue(manager2.propagate_tx(txC, fails_silently=False)) + self.assertTrue(manager2.propagate_tx(txC)) metaC = txC.get_metadata() self.assertIsNone(metaC.voided_by) @@ -99,7 +99,7 @@ def _run_test( # dot = self.graphviz.dot() # dot.render('dot0') - self.assertTrue(manager2.propagate_tx(blk1, fails_silently=False)) + self.assertTrue(manager2.propagate_tx(blk1)) blk1meta = blk1.get_metadata() self.graphviz.labels[blk1.hash] = 'blk1' self.assertIsNone(blk1meta.voided_by) @@ -110,7 +110,7 @@ def _run_test( blk2.update_timestamp(int(manager2.reactor.seconds())) blk2.nonce = self.rng.getrandbits(32) blk2.update_hash() - self.assertTrue(manager2.propagate_tx(blk2, fails_silently=False)) + self.assertTrue(manager2.propagate_tx(blk2)) blk2meta = blk2.get_metadata() self.graphviz.labels[blk2.hash] = 'blk2' self.assertIsNone(blk2meta.voided_by) @@ -121,7 +121,7 @@ def _run_test( blk3.parents[1] = txB.hash blk3.nonce = self.rng.getrandbits(32) blk3.update_hash() - self.assertTrue(manager2.propagate_tx(blk3, fails_silently=False)) + self.assertTrue(manager2.propagate_tx(blk3)) blk3meta = blk3.get_metadata() self.graphviz.labels[blk3.hash] = 'blk3' diff --git a/tests/dag_builder/builder.py b/tests/dag_builder/builder.py new file mode 100644 index 000000000..a787fe83e --- /dev/null +++ 
b/tests/dag_builder/builder.py @@ -0,0 +1,50 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from types import ModuleType + +from mnemonic import Mnemonic + +from hathor.dag_builder import DAGBuilder +from hathor.dag_builder.types import WalletFactoryType +from hathor.manager import HathorManager +from hathor.util import Random +from hathor.wallet import HDWallet +from tests.nanocontracts import test_blueprints +from tests.utils import GENESIS_SEED + + +class TestDAGBuilder: + @staticmethod + def create_random_hd_wallet(rng: Random) -> HDWallet: + m = Mnemonic('english') + words = m.to_mnemonic(rng.randbytes(32)) + hd = HDWallet(words=words) + hd._manually_initialize() + return hd + + @staticmethod + def from_manager( + manager: HathorManager, + genesis_words: str | None = None, + wallet_factory: WalletFactoryType | None = None, + blueprints_module: ModuleType | None = None + ) -> DAGBuilder: + """Create a DAGBuilder instance from a HathorManager instance.""" + return DAGBuilder.from_manager( + manager=manager, + genesis_words=genesis_words or GENESIS_SEED, + wallet_factory=wallet_factory or (lambda: TestDAGBuilder.create_random_hd_wallet(manager.rng)), + blueprints_module=blueprints_module or test_blueprints, + ) diff --git a/tests/dag_builder/test_dag_builder.py b/tests/dag_builder/test_dag_builder.py new file mode 100644 index 000000000..aa60804fc --- /dev/null +++ b/tests/dag_builder/test_dag_builder.py @@ -0,0 +1,420 @@ +import 
pytest + +from hathor.nanocontracts import Blueprint, Context, OnChainBlueprint, public +from hathor.nanocontracts.types import NCDepositAction, NCWithdrawalAction, TokenUid +from hathor.nanocontracts.utils import load_builtin_blueprint_for_ocb +from hathor.transaction import Block, Transaction +from hathor.transaction.token_creation_tx import TokenCreationTransaction +from tests import unittest +from tests.dag_builder.builder import TestDAGBuilder +from tests.nanocontracts import test_blueprints + + +class MyBlueprint(Blueprint): + counter: int + + @public + def initialize(self, ctx: Context, initial: int) -> None: + self.counter = initial + + @public + def add(self, ctx: Context, value: int) -> int: + self.counter += value + return self.counter + + @public + def sub(self, ctx: Context, value: int) -> int: + self.counter -= value + return self.counter + + +class DAGBuilderTestCase(unittest.TestCase): + def setUp(self): + super().setUp() + + from hathor.simulator.patches import SimulatorCpuMiningService + from hathor.simulator.simulator import _build_vertex_verifiers + + cpu_mining_service = SimulatorCpuMiningService() + + builder = self.get_builder() \ + .set_vertex_verifiers_builder(_build_vertex_verifiers) \ + .set_cpu_mining_service(cpu_mining_service) + + self.manager = self.create_peer_from_builder(builder) + self.nc_catalog = self.manager.tx_storage.nc_catalog + self.dag_builder = TestDAGBuilder.from_manager(self.manager) + + def test_one_tx(self) -> None: + artifacts = self.dag_builder.build_from_str(""" + blockchain genesis b[1..50] + b1.out[0] <<< tx1 + b30 < tx1 # reward lock + b40 --> tx1 + """) + + artifacts.propagate_with(self.manager) + + v_order = [node.name for node, _ in artifacts.list] + + b1, b40 = artifacts.get_typed_vertices(['b1', 'b40'], Block) + tx1 = artifacts.get_typed_vertex('tx1', Transaction) + + # blockchain genesis b[1..50] + self.assertEqual(b1.parents[0], self._settings.GENESIS_BLOCK_HASH) + for i in range(2, 51): + prev = 
artifacts.by_name[f'b{i - 1}'].vertex + cur = artifacts.by_name[f'b{i}'].vertex + self.assertEqual(cur.parents[0], prev.hash) + + # b30 < tx1 + self.assertGreater(v_order.index('tx1'), v_order.index('b30')) + + # b1.out[0] <<< tx1 + self.assertEqual(tx1.inputs[0].tx_id, b1.hash) + + # b40 --> tx1 + self.assertEqual(tx1.get_metadata().first_block, b40.hash) + + def test_weight(self) -> None: + artifacts = self.dag_builder.build_from_str(""" + blockchain genesis b[1..50] + blockchain b37 c[1..1] + b30 < dummy + b50 < c1 + + tx1.out[0] = 1 TKA + + TKA.weight = 31.8 + tx1.weight = 25.2 + c1.weight = 80.6 + """) + + artifacts.propagate_with(self.manager) + + c1, b38 = artifacts.get_typed_vertices(['c1', 'b38'], Block) + tx1 = artifacts.get_typed_vertex('tx1', Transaction) + tka = artifacts.get_typed_vertex('TKA', TokenCreationTransaction) + + self.assertAlmostEqual(tka.weight, 31.8) + self.assertAlmostEqual(tx1.weight, 25.2) + self.assertAlmostEqual(c1.weight, 80.6) + self.assertIsNotNone(b38.get_metadata().voided_by, b38) + + def test_spend_unspecified_utxo(self) -> None: + artifacts = self.dag_builder.build_from_str(""" + blockchain genesis b[1..50] + b30 < dummy + tx1.out[0] <<< tx2 + """) + + artifacts.propagate_with(self.manager) + + tx1 = artifacts.get_typed_vertex('tx1', Transaction) + self.assertEqual(len(tx1.outputs), 1) + # the default filler fills unspecified utxos with 1 HTR + self.assertEqual(tx1.outputs[0].value, 1) + self.assertEqual(tx1.outputs[0].token_data, 0) + + def test_block_parents(self) -> None: + artifacts = self.dag_builder.build_from_str(""" + blockchain genesis b[1..50] + b30 < dummy + + b32 --> tx1 + + b34 --> tx2 + + b36 --> tx3 + b36 --> tx4 + """) + + artifacts.propagate_with(self.manager) + + blocks = ['b30', 'b31', 'b32', 'b33', 'b34', 'b35', 'b36', 'b37'] + b0, b1, b2, b3, b4, b5, b6, b7 = artifacts.get_typed_vertices(blocks, Block) + tx1, tx2, tx3, tx4 = artifacts.get_typed_vertices(['tx1', 'tx2', 'tx3', 'tx4'], Transaction) + + 
self.assertEqual(b2.parents[0], b1.hash) + self.assertEqual(b3.parents[0], b2.hash) + self.assertEqual(b4.parents[0], b3.hash) + self.assertEqual(b5.parents[0], b4.hash) + self.assertEqual(b6.parents[0], b5.hash) + + self.assertEqual(set(b1.parents[1:]), set(b0.parents[1:])) + self.assertEqual(set(b3.parents[1:]), set(b2.parents[1:])) + self.assertEqual(set(b5.parents[1:]), set(b4.parents[1:])) + self.assertEqual(set(b7.parents[1:]), set(b6.parents[1:])) + + self.assertTrue(set(b2.parents[1:]).issubset([tx1.hash] + b1.parents[1:])) + self.assertTrue(set(b4.parents[1:]).issubset([tx2.hash] + b3.parents[1:])) + self.assertEqual(set(b6.parents[1:]), {tx3.hash, tx4.hash}) + + def test_custom_token(self) -> None: + artifacts = self.dag_builder.build_from_str(""" + blockchain genesis b[1..50] + b1.out[0] <<< tx1 + tx1.out[1] = 100 TKA + b30 < tx1 # reward lock + b30 < dummy # reward lock + b40 --> tx1 + """) + + artifacts.propagate_with(self.manager) + + tx1 = artifacts.get_typed_vertex('tx1', Transaction) + tka = artifacts.get_typed_vertex('TKA', TokenCreationTransaction) + + # TKA token creation transaction + self.assertEqual(tka.token_name, 'TKA') + self.assertEqual(tka.token_symbol, 'TKA') + + # tx1.out[1] = 100 TKA + self.assertEqual(tx1.outputs[1].value, 100) + self.assertEqual(tx1.get_token_uid(tx1.outputs[1].token_data), tka.hash) + + def test_big_dag(self) -> None: + artifacts = self.dag_builder.build_from_str(""" + blockchain genesis a[0..30] + blockchain a30 b[0..20] + blockchain b4 c[0..10] + + a30 < dummy + + b11 --> tx1 + b11 --> tx2 + + b14 --> tx1 + b14 --> tx3 + + c3 --> tx1 + c3 --> tx2 + + tx1 <-- tx2 <-- tx3 + + tx3 --> tx5 --> tx6 + + tx1.out[0] <<< tx2 tx3 + tx1.out[0] <<< tx4 + + a0.out[0] <<< tx1 + + tx1.out[0] = 100 HTR [wallet1] + tx1.out[1] = 50 TK1 [wallet2] + tx2.out[0] = 75 USDC [wallet1] + + USDC.out[0] = 100000 HTR + + b5 < c0 < c10 < b20 + b6 < tx3 + b16 < tx4 + """) + + artifacts.propagate_with(self.manager) + + def 
test_no_hash_conflict(self) -> None: + artifacts = self.dag_builder.build_from_str(""" + blockchain genesis b[1..33] + + b30 < dummy + + tx10.out[0] <<< tx20 tx30 tx40 + """) + artifacts.propagate_with(self.manager) + + def test_propagate_with(self) -> None: + tx_storage = self.manager.tx_storage + artifacts = self.dag_builder.build_from_str(''' + blockchain genesis b[1..10] + b10 < dummy + tx1 <-- tx2 + ''') + + artifacts.propagate_with(self.manager, up_to='b5') + assert len(list(tx_storage.get_all_transactions())) == 8 # 3 genesis + 5 blocks + + artifacts.propagate_with(self.manager, up_to='b10') + assert len(list(tx_storage.get_all_transactions())) == 13 # 3 genesis + 10 blocks + + artifacts.propagate_with(self.manager, up_to='tx1') + assert len(list(tx_storage.get_all_transactions())) == 15 # 3 genesis + 10 blocks + dummy + tx1 + + artifacts.propagate_with(self.manager) + assert len(list(tx_storage.get_all_transactions())) == 16 # 3 genesis + 10 blocks + dummy + tx1 + tx2 + + def test_nc_transactions(self) -> None: + blueprint_id = b'x' * 32 + self.nc_catalog.blueprints[blueprint_id] = MyBlueprint + + artifacts = self.dag_builder.build_from_str(f""" + blockchain genesis a[0..40] + a30 < dummy + + tx1.nc_id = "{blueprint_id.hex()}" + tx1.nc_method = initialize(0) + + tx2.nc_id = tx1 + tx2.nc_method = add(5) + tx2.nc_deposit = 10 HTR + tx2.nc_deposit = 5 TKA + + tx3.nc_id = tx1 + tx3.nc_method = sub(3) + tx3.nc_deposit = 3 HTR + tx3.nc_withdrawal = 2 TKA + + a31 --> tx1 + a32 --> tx2 + a33 --> tx3 + """) + + artifacts.propagate_with(self.manager) + + tx1 = artifacts.by_name['tx1'].vertex + self.assertIsInstance(tx1, Transaction) + self.assertTrue(tx1.is_nano_contract()) + + htr_id = TokenUid(b'\0') + tka_id = TokenUid(artifacts.by_name['TKA'].vertex.hash) + + tx2 = artifacts.by_name['tx2'].vertex + tx3 = artifacts.by_name['tx3'].vertex + + ctx2 = tx2.get_nano_header().get_context() + self.assertEqual(dict(ctx2.actions), { + tka_id: 
(NCDepositAction(token_uid=tka_id, amount=5),), + htr_id: (NCDepositAction(token_uid=htr_id, amount=10),), + }) + + ctx3 = tx3.get_nano_header().get_context() + self.assertEqual(dict(ctx3.actions), { + htr_id: (NCDepositAction(token_uid=htr_id, amount=3),), + tka_id: (NCWithdrawalAction(token_uid=tka_id, amount=2),), + }) + + def test_multiline_literals(self) -> None: + artifacts = self.dag_builder.build_from_str(""" + tx.attr1 = ``` + test + ``` + tx.attr2 = ``` + if foo: + bar + ``` + """) + node = artifacts.by_name['tx'].node + + # asserting with raw shifted strings to make sure we get the expected output. + assert node.get_required_literal('attr1') == """\ +test""" + assert node.get_required_literal('attr2') == """\ +if foo: + bar""" + + invalid_start_texts = [ + """ + tx.attr1 = a``` + ``` + """, + """ + tx.attr1 = ```a + ``` + """, + """ + tx.attr1 = ```a``` + """, + ] + + for text in invalid_start_texts: + with pytest.raises(SyntaxError) as e: + self.dag_builder.build_from_str(text) + assert str(e.value) == 'invalid multiline string start' + + invalid_end_texts = [ + """ + tx.attr1 = ``` + a``` + """, + """ + tx.attr1 = ``` + ```a + """, + ] + + for text in invalid_end_texts: + with pytest.raises(SyntaxError) as e: + self.dag_builder.build_from_str(text) + assert str(e.value) == 'invalid multiline string end' + + with pytest.raises(SyntaxError) as e: + self.dag_builder.build_from_str(""" + tx.attr1 = ``` + test + """) + assert str(e.value) == 'unclosed multiline string' + + def test_on_chain_blueprints(self) -> None: + bet_code = load_builtin_blueprint_for_ocb('bet.py', 'Bet', test_blueprints) + private_key = unittest.OCB_TEST_PRIVKEY.hex() + password = unittest.OCB_TEST_PASSWORD.hex() + artifacts = self.dag_builder.build_from_str(f""" + blockchain genesis b[1..11] + b10 < dummy + + ocb1.ocb_private_key = "{private_key}" + ocb1.ocb_password = "{password}" + + ocb2.ocb_private_key = "{private_key}" + ocb2.ocb_password = "{password}" + + ocb3.ocb_private_key = 
"{private_key}" + ocb3.ocb_password = "{password}" + + nc1.nc_id = ocb1 + nc1.nc_method = initialize("00", "00", 0) + + nc2.nc_id = ocb2 + nc2.nc_method = initialize(0) + + nc3.nc_id = ocb3 + nc3.nc_method = initialize() + + ocb1 <-- ocb2 <-- ocb3 <-- b11 + b11 < nc1 < nc2 < nc3 + + ocb1.ocb_code = "{bet_code.encode().hex()}" + ocb2.ocb_code = test_blueprint1.py, TestBlueprint1 + ocb3.ocb_code = ``` + from hathor.nanocontracts import Blueprint + from hathor.nanocontracts.context import Context + from hathor.nanocontracts.types import public + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + __blueprint__ = MyBlueprint + ``` + """) + + artifacts.propagate_with(self.manager) + ocb1, ocb2, ocb3 = artifacts.get_typed_vertices(['ocb1', 'ocb2', 'ocb3'], OnChainBlueprint) + nc1, nc2, nc3 = artifacts.get_typed_vertices(['nc1', 'nc2', 'nc3'], Transaction) + + assert nc1.is_nano_contract() + assert nc2.is_nano_contract() + assert nc3.is_nano_contract() + + assert ocb1.get_blueprint_class().__name__ == 'Bet' + assert nc1.get_nano_header().nc_id == ocb1.hash + blueprint_class = self.manager.tx_storage.get_blueprint_class(ocb1.hash) + assert blueprint_class.__name__ == 'Bet' + + assert ocb2.get_blueprint_class().__name__ == 'TestBlueprint1' + assert nc2.get_nano_header().nc_id == ocb2.hash + blueprint_class = self.manager.tx_storage.get_blueprint_class(ocb2.hash) + assert blueprint_class.__name__ == 'TestBlueprint1' + + assert ocb3.get_blueprint_class().__name__ == 'MyBlueprint' + assert nc3.get_nano_header().nc_id == ocb3.hash + blueprint_class = self.manager.tx_storage.get_blueprint_class(ocb3.hash) + assert blueprint_class.__name__ == 'MyBlueprint' diff --git a/tests/dag_builder/test_dag_builter.py b/tests/dag_builder/test_dag_builter.py deleted file mode 100644 index c10741303..000000000 --- a/tests/dag_builder/test_dag_builter.py +++ /dev/null @@ -1,220 +0,0 @@ -from hathor.transaction.token_creation_tx import 
TokenCreationTransaction -from tests import unittest - - -class DAGCreatorTestCase(unittest.TestCase): - def setUp(self): - super().setUp() - - from hathor.simulator.patches import SimulatorCpuMiningService - from hathor.simulator.simulator import _build_vertex_verifiers - - cpu_mining_service = SimulatorCpuMiningService() - - builder = self.get_builder() \ - .set_vertex_verifiers_builder(_build_vertex_verifiers) \ - .set_cpu_mining_service(cpu_mining_service) - - self.manager = self.create_peer_from_builder(builder) - self.dag_builder = self.get_dag_builder(self.manager) - - def test_one_tx(self) -> None: - artifacts = self.dag_builder.build_from_str(""" - blockchain genesis b[1..50] - b1.out[0] <<< tx1 - b30 < tx1 # reward lock - b40 --> tx1 - """) - - for node, vertex in artifacts.list: - self.manager.on_new_tx(vertex, fails_silently=False) - - v_order = [node.name for node, _ in artifacts.list] - - tx1 = artifacts.by_name['tx1'].vertex - b1 = artifacts.by_name['b1'].vertex - b40 = artifacts.by_name['b40'].vertex - - # blockchain genesis b[1..50] - self.assertEqual(b1.parents[0], self._settings.GENESIS_BLOCK_HASH) - for i in range(2, 51): - prev = artifacts.by_name[f'b{i - 1}'].vertex - cur = artifacts.by_name[f'b{i}'].vertex - self.assertEqual(cur.parents[0], prev.hash) - - # b30 < tx1 - self.assertGreater(v_order.index('tx1'), v_order.index('b30')) - - # b1.out[0] <<< tx1 - self.assertEqual(tx1.inputs[0].tx_id, b1.hash) - - # b40 --> tx1 - self.assertEqual(tx1.get_metadata().first_block, b40.hash) - - def test_weight(self) -> None: - artifacts = self.dag_builder.build_from_str(""" - blockchain genesis b[1..50] - blockchain b37 c[1..1] - b30 < dummy - b50 < c1 - - tx1.out[0] = 1 TKA - - TKA.weight = 31.8 - tx1.weight = 25.2 - c1.weight = 80.6 - """) - - for node, vertex in artifacts.list: - self.manager.on_new_tx(vertex, fails_silently=False) - - tx1 = artifacts.by_name['tx1'].vertex - tka = artifacts.by_name['TKA'].vertex - c1 = artifacts.by_name['c1'].vertex 
- b38 = artifacts.by_name['b38'].vertex - - self.assertAlmostEqual(tka.weight, 31.8) - self.assertAlmostEqual(tx1.weight, 25.2) - self.assertAlmostEqual(c1.weight, 80.6) - self.assertIsNotNone(b38.get_metadata().voided_by, b38) - - def test_spend_unspecified_utxo(self) -> None: - artifacts = self.dag_builder.build_from_str(""" - blockchain genesis b[1..50] - b30 < dummy - tx1.out[0] <<< tx2 - """) - - for node, vertex in artifacts.list: - self.manager.on_new_tx(vertex, fails_silently=False) - - tx1 = artifacts.by_name['tx1'].vertex - self.assertEqual(len(tx1.outputs), 1) - # the default filler fills unspecified utxos with 1 HTR - self.assertEqual(tx1.outputs[0].value, 1) - self.assertEqual(tx1.outputs[0].token_data, 0) - - def test_block_parents(self) -> None: - artifacts = self.dag_builder.build_from_str(""" - blockchain genesis b[1..50] - b30 < dummy - - b32 --> tx1 - - b34 --> tx2 - - b36 --> tx3 - b36 --> tx4 - """) - - for node, vertex in artifacts.list: - self.manager.on_new_tx(vertex, fails_silently=False) - - b0 = artifacts.by_name['b30'].vertex - b1 = artifacts.by_name['b31'].vertex - b2 = artifacts.by_name['b32'].vertex - b3 = artifacts.by_name['b33'].vertex - b4 = artifacts.by_name['b34'].vertex - b5 = artifacts.by_name['b35'].vertex - b6 = artifacts.by_name['b36'].vertex - b7 = artifacts.by_name['b37'].vertex - - tx1 = artifacts.by_name['tx1'].vertex - tx2 = artifacts.by_name['tx2'].vertex - tx3 = artifacts.by_name['tx3'].vertex - tx4 = artifacts.by_name['tx4'].vertex - - self.assertEqual(b2.parents[0], b1.hash) - self.assertEqual(b3.parents[0], b2.hash) - self.assertEqual(b4.parents[0], b3.hash) - self.assertEqual(b5.parents[0], b4.hash) - self.assertEqual(b6.parents[0], b5.hash) - - self.assertEqual(set(b1.parents[1:]), set(b0.parents[1:])) - self.assertEqual(set(b3.parents[1:]), set(b2.parents[1:])) - self.assertEqual(set(b5.parents[1:]), set(b4.parents[1:])) - self.assertEqual(set(b7.parents[1:]), set(b6.parents[1:])) - - 
self.assertTrue(set(b2.parents[1:]).issubset([tx1.hash] + b1.parents[1:])) - self.assertTrue(set(b4.parents[1:]).issubset([tx2.hash] + b3.parents[1:])) - self.assertEqual(set(b6.parents[1:]), {tx3.hash, tx4.hash}) - - def test_custom_token(self) -> None: - artifacts = self.dag_builder.build_from_str(""" - blockchain genesis b[1..50] - b1.out[0] <<< tx1 - tx1.out[1] = 100 TKA - b30 < tx1 # reward lock - b30 < dummy # reward lock - b40 --> tx1 - """) - - for node, vertex in artifacts.list: - self.manager.on_new_tx(vertex, fails_silently=False) - - tka = artifacts.by_name['TKA'].vertex - tx1 = artifacts.by_name['tx1'].vertex - - # TKA token creation transaction - self.assertIsInstance(tka, TokenCreationTransaction) - self.assertEqual(tka.token_name, 'TKA') - self.assertEqual(tka.token_symbol, 'TKA') - - # tx1.out[1] = 100 TKA - self.assertEqual(tx1.outputs[1].value, 100) - self.assertEqual(tx1.get_token_uid(tx1.outputs[1].token_data), tka.hash) - - def test_big_dag(self) -> None: - artifacts = self.dag_builder.build_from_str(""" - blockchain genesis a[0..30] - blockchain a30 b[0..20] - blockchain b4 c[0..10] - - a30 < dummy - - b11 --> tx1 - b11 --> tx2 - - b14 --> tx1 - b14 --> tx3 - - c3 --> tx1 - c3 --> tx2 - - tx1 <-- tx2 <-- tx3 - - tx3 --> tx5 --> tx6 - - tx1.out[0] <<< tx2 tx3 - tx1.out[0] <<< tx4 - - a0.out[0] <<< tx1 - - tx1.out[0] = 100 HTR [wallet1] - tx1.out[1] = 50 TK1 [wallet2] - tx2.out[0] = 75 USDC [wallet1] - - USDC.out[0] = 100000 HTR - - b5 < c0 < c10 < b20 - b6 < tx3 - b16 < tx4 - """) - - for node, vertex in artifacts.list: - self.manager.on_new_tx(vertex, fails_silently=False) - - def test_no_hash_conflict(self) -> None: - artifacts = self.dag_builder.build_from_str(""" - blockchain genesis b[1..33] - - b30 < dummy - - tx10.out[0] <<< tx20 tx30 tx40 - """) - - for node, vertex in artifacts.list: - print() - print(node.name) - print() - self.manager.on_new_tx(vertex, fails_silently=False) diff --git a/tests/event/event_simulation_tester.py 
b/tests/event/event_simulation_tester.py index e04f05466..00fedaa71 100644 --- a/tests/event/event_simulation_tester.py +++ b/tests/event/event_simulation_tester.py @@ -18,7 +18,6 @@ from twisted.internet.testing import StringTransport -from hathor.builder import Builder from hathor.event.websocket import EventWebsocketProtocol from hathor.event.websocket.request import Request from hathor.event.websocket.response import EventResponse, InvalidRequestResponse @@ -29,13 +28,16 @@ class BaseEventSimulationTester(SimulatorTestCase): - builder: Builder + def setUp(self) -> None: + super().setUp() + self._prepare(reward_spend_min_blocks=1) # to make tests run quicker - def _create_artifacts(self) -> None: + def _prepare(self, reward_spend_min_blocks: int) -> None: peer = PrivatePeer.auto_generated() - builder = self.builder.set_peer(peer) \ - .disable_full_verification() \ - .enable_event_queue() + builder = self.simulator.get_default_builder() \ + .set_peer(peer) \ + .enable_event_queue() \ + .set_settings(self._settings._replace(REWARD_SPEND_MIN_BLOCKS=reward_spend_min_blocks)) artifacts = self.simulator.create_artifacts(builder) self.peer_id: str = str(peer.id) @@ -90,22 +92,3 @@ def _decode_values(values: bytes) -> Iterable[dict[str, Any]]: yield json_loadb(value) buf = new_buf - - -class MemoryEventSimulationTester(BaseEventSimulationTester): - def setUp(self) -> None: - super().setUp() - self.builder = self.simulator.get_default_builder() - self._create_artifacts() - - -class RocksDBEventSimulationTester(BaseEventSimulationTester): - def setUp(self) -> None: - super().setUp() - import tempfile - - directory = tempfile.mkdtemp() - self.tmpdirs.append(directory) - - self.builder = self.simulator.get_default_builder().use_rocksdb(path=directory) - self._create_artifacts() diff --git a/tests/event/test_base_event.py b/tests/event/test_base_event.py index fe842764e..15393d5e9 100644 --- a/tests/event/test_base_event.py +++ b/tests/event/test_base_event.py @@ -43,6 +43,7 
@@ def test_create_base_event(event_id: int, group_id: int | None) -> None: signal_bits=0, version=1, weight=10.0, + headers=[], inputs=[], outputs=[], parents=[], @@ -64,7 +65,8 @@ def test_create_base_event(event_id: int, group_id: int | None) -> None: accumulated_weight_raw="1024", score_raw="1048576", height=100, - validation='validation' + validation='validation', + nc_execution=None, ) ), group_id=group_id diff --git a/tests/event/test_event_manager.py b/tests/event/test_event_manager.py index cebd50ae3..b77c8cf62 100644 --- a/tests/event/test_event_manager.py +++ b/tests/event/test_event_manager.py @@ -1,5 +1,5 @@ from hathor.event.model.event_type import EventType -from hathor.event.storage.memory_storage import EventMemoryStorage +from hathor.event.storage import EventRocksDBStorage from hathor.pubsub import HathorEvents from hathor.util import not_none from tests import unittest @@ -9,11 +9,12 @@ class EventManagerTest(unittest.TestCase): def setUp(self) -> None: super().setUp() self.network = 'testnet' - self.event_storage = EventMemoryStorage() + self.event_storage = EventRocksDBStorage( + rocksdb_storage=self.create_rocksdb_storage(), + ) self.manager = self.create_peer( self.network, enable_event_queue=True, - full_verification=False, event_storage=self.event_storage ) diff --git a/tests/event/test_event_reorg.py b/tests/event/test_event_reorg.py index 873aeea88..bf3e9eab5 100644 --- a/tests/event/test_event_reorg.py +++ b/tests/event/test_event_reorg.py @@ -1,5 +1,5 @@ from hathor.event.model.event_type import EventType -from hathor.event.storage import EventMemoryStorage +from hathor.event.storage import EventRocksDBStorage from hathor.simulator.utils import add_new_blocks from tests import unittest from tests.utils import BURN_ADDRESS, get_genesis_key @@ -9,11 +9,12 @@ class EventReorgTest(unittest.TestCase): def setUp(self) -> None: super().setUp() self.network = 'testnet' - self.event_storage = EventMemoryStorage() + self.event_storage = 
EventRocksDBStorage( + rocksdb_storage=self.create_rocksdb_storage(), + ) self.manager = self.create_peer( self.network, enable_event_queue=True, - full_verification=False, event_storage=self.event_storage ) @@ -34,7 +35,7 @@ def test_reorg_events(self) -> None: b0 = tb0.generate_mining_block(self.manager.rng, storage=self.manager.tx_storage, address=BURN_ADDRESS) b0.weight = 10 self.manager.cpu_mining_service.resolve(b0) - self.manager.propagate_tx(b0, fails_silently=False) + self.manager.propagate_tx(b0) self.log.debug('reorg block propagated') self.run_to_completion() @@ -50,6 +51,7 @@ def test_reorg_events(self) -> None: (EventType.VERTEX_METADATA_CHANGED, {'hash': blocks[0].hash_hex}), (EventType.VERTEX_METADATA_CHANGED, {'hash': self._settings.GENESIS_TX2_HASH.hex()}), (EventType.VERTEX_METADATA_CHANGED, {'hash': self._settings.GENESIS_TX1_HASH.hex()}), + (EventType.VERTEX_METADATA_CHANGED, {'hash': self._settings.GENESIS_BLOCK_HASH.hex()}), (EventType.NEW_VERTEX_ACCEPTED, {'hash': blocks[0].hash_hex}), (EventType.VERTEX_METADATA_CHANGED, {'hash': blocks[1].hash_hex}), (EventType.NEW_VERTEX_ACCEPTED, {'hash': blocks[1].hash_hex}), diff --git a/tests/event/test_event_simulation_responses.py b/tests/event/test_event_simulation_responses.py index c2726dddc..779d1b485 100644 --- a/tests/event/test_event_simulation_responses.py +++ b/tests/event/test_event_simulation_responses.py @@ -15,16 +15,10 @@ from hathor.event.websocket.request import AckRequest, StartStreamRequest, StopStreamRequest from hathor.event.websocket.response import InvalidRequestType from hathor.simulator.trigger import StopAfterNMinedBlocks -from tests.event.event_simulation_tester import ( - BaseEventSimulationTester, - MemoryEventSimulationTester, - RocksDBEventSimulationTester, -) +from tests.event.event_simulation_tester import BaseEventSimulationTester -class BaseEventSimulationResponsesTest(BaseEventSimulationTester): - __test__ = False - +class 
EventSimulationResponsesTest(BaseEventSimulationTester): def test_no_start_no_blocks(self) -> None: self.simulator.run(36000) @@ -144,10 +138,10 @@ def test_restart(self) -> None: responses = self._get_success_responses() # genesis events (5) - # + VERTEX_METADATA_CHANGED, one for each genesis tx (2) + # + VERTEX_METADATA_CHANGED, one for each genesis tx (2) and for the genesis block (1) # + one NEW_VERTEX_ACCEPTED and one VERTEX_METADATA_CHANGED for each new block (2*10) # there are free slots in window_size - assert len(responses) == 5 + 2 + 2 * 10 # = 27 + assert len(responses) == 5 + 3 + 2 * 10 # = 28 assert responses[0].event.id == 0 # no ack, so we get from the first event # stop the event stream @@ -173,9 +167,9 @@ def test_restart(self) -> None: # get responses responses = self._get_success_responses() - # events from before (27) + # events from before (28) # + one NEW_VERTEX_ACCEPTED and one VERTEX_METADATA_CHANGED for each new block (2*10) - assert len(responses) == 27 + 2 * 10 + assert len(responses) == 28 + 2 * 10 assert responses[0].event.id == 0 # no ack, so we get from the first event def test_restart_with_ack(self) -> None: @@ -195,10 +189,10 @@ def test_restart_with_ack(self) -> None: responses = self._get_success_responses() # genesis events (5) - # + VERTEX_METADATA_CHANGED, one for each genesis tx (2) + # + VERTEX_METADATA_CHANGED, one for each genesis tx (2) and for the genesis block (1) # + one NEW_VERTEX_ACCEPTED and one VERTEX_METADATA_CHANGED for each new block (2*10) # there are free slots in window_size - assert len(responses) == 5 + 2 + 2 * 10 # = 27 + assert len(responses) == 5 + 3 + 2 * 10 # = 28 assert responses[0].event.id == 0 # no ack, so we get from the first event # stop the event stream @@ -218,7 +212,7 @@ def test_restart_with_ack(self) -> None: miner.stop() # restart event stream from last event - start_stream = StartStreamRequest(type='START_STREAM', window_size=100, last_ack_event_id=26) + start_stream = 
StartStreamRequest(type='START_STREAM', window_size=100, last_ack_event_id=27) self._send_request(start_stream) self.simulator.run(36000) @@ -227,7 +221,7 @@ def test_restart_with_ack(self) -> None: # one NEW_VERTEX_ACCEPTED and one VERTEX_METADATA_CHANGED for each new block (2*10) assert len(responses) == 2 * 10 - assert responses[0].event.id == 27 # ack=26, so we get from event 27 + assert responses[0].event.id == 28 # ack=27, so we get from event 28 def test_restart_with_ack_too_small(self) -> None: # start the event stream @@ -247,14 +241,14 @@ def test_restart_with_ack_too_small(self) -> None: responses = self._get_success_responses() # genesis events (5) - # + VERTEX_METADATA_CHANGED, one for each genesis tx (2) + # + VERTEX_METADATA_CHANGED, one for each genesis tx (2) and for the genesis block (1) # + one NEW_VERTEX_ACCEPTED and one VERTEX_METADATA_CHANGED for each new block (2*10) # there are free slots in window_size - assert len(responses) == 5 + 2 + 2 * 10 # = 27 + assert len(responses) == 5 + 3 + 2 * 10 # = 28 assert responses[0].event.id == 0 # no ack, so we get from the first event # ack all received events - ack = AckRequest(type='ACK', window_size=100, ack_event_id=26) + ack = AckRequest(type='ACK', window_size=100, ack_event_id=27) self._send_request(ack) self.simulator.run(36000) @@ -359,11 +353,3 @@ def test_multiple_interactions(self) -> None: assert len(responses) == 4 # 4 events because of window size assert responses[0].event.id == 8 # ack=7, so we get from event 8 - - -class MemoryEventSimulationResponsesTest(BaseEventSimulationResponsesTest, MemoryEventSimulationTester): - __test__ = True - - -class RocksDBEventSimulationResponsesTest(BaseEventSimulationResponsesTest, RocksDBEventSimulationTester): - __test__ = True diff --git a/tests/event/test_event_simulation_scenarios.py b/tests/event/test_event_simulation_scenarios.py index 89cd57e42..aacd6fd80 100644 --- a/tests/event/test_event_simulation_scenarios.py +++ 
b/tests/event/test_event_simulation_scenarios.py @@ -17,6 +17,7 @@ from hathor.event.model.event_data import ( DecodedTxOutput, EmptyData, + NCEventData, ReorgData, SpentOutput, TxData, @@ -28,27 +29,28 @@ from hathor.event.model.event_type import EventType from hathor.event.websocket.request import StartStreamRequest from hathor.event.websocket.response import EventResponse -from tests.event.event_simulation_tester import ( - BaseEventSimulationTester, - MemoryEventSimulationTester, - RocksDBEventSimulationTester, -) +from hathor.transaction import Block, Transaction +from tests.event.event_simulation_tester import BaseEventSimulationTester -class BaseEventSimulationScenariosTest(BaseEventSimulationTester): +class EventSimulationScenariosTest(BaseEventSimulationTester): """ NOTE: The lists of expected events used in tests below were generated by printing the event responses list to the console and then copying the output and manipulating it to create instances. """ - __test__ = False - seed_config = 6946502462188444706 + def setUp(self) -> None: + super().setUp() + self.genesis_block_hash = self._settings.GENESIS_BLOCK_HASH.hex() + self.genesis_tx1_hash = self._settings.GENESIS_TX1_HASH.hex() + self.genesis_tx2_hash = self._settings.GENESIS_TX2_HASH.hex() + def assert_response_equal(self, responses: list[EventResponse], expected: list[EventResponse]) -> None: """Compare responses and expected responses. 
""" - self.assertEqual(len(responses), len(expected)) + self.assertEqual(len(responses), len(expected), f'\nexpected: {expected}\nactual: {responses}') for a, b in zip(responses, expected): self.assertEqual(type(a), type(b)) @@ -95,19 +97,20 @@ def test_single_chain_one_block(self) -> None: expected = [ # LOAD_STATED - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=0, timestamp=1578878880.0, type=EventType.LOAD_STARTED, data=EmptyData(), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=0, timestamp=1578878880.0, type=EventType.LOAD_STARTED, data=EmptyData(), group_id=None), latest_event_id=9, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for each genesis (1 block and 2 txs) - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, 
data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, 
metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=9, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=9, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=9, stream_id=stream_id), # noqa: E501 # LOAD_FINISHED - 
EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=4, timestamp=1578878880.0, type=EventType.LOAD_FINISHED, data=EmptyData(), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=4, timestamp=1578878880.0, type=EventType.LOAD_FINISHED, data=EmptyData(), group_id=None), latest_event_id=9, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each genesis tx (2), adding the new block as their child # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=6, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, 
data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], 
outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=9, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=6, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=9, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, 
data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=9, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[''], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=9, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new block - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], 
parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=8, stream_id=stream_id) # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=9, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=9, stream_id=stream_id) # noqa: E501 ] self.assert_response_equal(responses, expected) @@ -122,56 +125,57 @@ def test_single_chain_blocks_and_transactions(self) -> None: expected = [ # LOAD_STATED - 
EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=0, timestamp=1578878880.0, type=EventType.LOAD_STARTED, data=EmptyData(), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=0, timestamp=1578878880.0, type=EventType.LOAD_STARTED, data=EmptyData(), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for each genesis (1 block and 2 txs) - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', 
spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), 
group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # LOAD_FINISHED - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=4, timestamp=1578878880.0, type=EventType.LOAD_FINISHED, data=EmptyData(), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=4, 
timestamp=1578878880.0, type=EventType.LOAD_FINISHED, data=EmptyData(), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each genesis tx (2), adding the new block as their child # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=6, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, 
metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', 
'896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), 
group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=6, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], 
outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[''], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new block - 
EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=9, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, 
token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED and one NEW_VERTEX_ACCEPTED for 10 new blocks - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=9, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, accumulated_weight_raw="4", score_raw="20", first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=10, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, 
data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, accumulated_weight_raw="4", score_raw="20", first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=11, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], 
conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, accumulated_weight_raw="4", score_raw="24", first_block=None, height=3, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=12, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, accumulated_weight_raw="4", score_raw="24", first_block=None, height=3, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=13, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, 
script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, accumulated_weight_raw="4", score_raw="28", first_block=None, height=4, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=14, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, accumulated_weight_raw="4", 
score_raw="28", first_block=None, height=4, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=15, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, accumulated_weight_raw="4", score_raw="32", first_block=None, height=5, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=16, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', 
'16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, accumulated_weight_raw="4", score_raw="32", first_block=None, height=5, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=17, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, accumulated_weight_raw="4", score_raw="36", first_block=None, height=6, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=18, 
timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, accumulated_weight_raw="4", score_raw="36", first_block=None, height=6, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=19, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, 
metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, accumulated_weight_raw="4", score_raw="40", first_block=None, height=7, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=20, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, accumulated_weight_raw="4", score_raw="40", first_block=None, height=7, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=21, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, 
timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, accumulated_weight_raw="4", score_raw="44", first_block=None, height=8, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=22, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], 
children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, accumulated_weight_raw="4", score_raw="44", first_block=None, height=8, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=23, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, accumulated_weight_raw="4", score_raw="48", first_block=None, height=9, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=24, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', 
decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, accumulated_weight_raw="4", score_raw="48", first_block=None, height=9, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=25, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, accumulated_weight_raw="4", score_raw="52", first_block=None, height=10, 
validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=26, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, accumulated_weight_raw="4", score_raw="52", first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=27, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 
'16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, accumulated_weight_raw="4", score_raw="56", first_block=None, height=11, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=28, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, accumulated_weight_raw="4", score_raw="56", first_block=None, height=11, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=10, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, 
data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, accumulated_weight_raw="4", score_raw="20", first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=11, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], 
voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, accumulated_weight_raw="4", score_raw="20", first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=12, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, accumulated_weight_raw="4", score_raw="24", first_block=None, height=3, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=13, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, 
script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, accumulated_weight_raw="4", score_raw="24", first_block=None, height=3, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=14, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, accumulated_weight_raw="4", 
score_raw="28", first_block=None, height=4, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=15, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, accumulated_weight_raw="4", score_raw="28", first_block=None, height=4, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=16, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', 
'16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, accumulated_weight_raw="4", score_raw="32", first_block=None, height=5, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=17, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, accumulated_weight_raw="4", score_raw="32", first_block=None, height=5, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=18, 
timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, accumulated_weight_raw="4", score_raw="36", first_block=None, height=6, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=19, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, 
metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, accumulated_weight_raw="4", score_raw="36", first_block=None, height=6, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=20, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, accumulated_weight_raw="4", score_raw="40", first_block=None, height=7, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=21, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, 
timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, accumulated_weight_raw="4", score_raw="40", first_block=None, height=7, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=22, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], 
children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, accumulated_weight_raw="4", score_raw="44", first_block=None, height=8, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=23, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, accumulated_weight_raw="4", score_raw="44", first_block=None, height=8, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=24, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', 
decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, accumulated_weight_raw="4", score_raw="48", first_block=None, height=9, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=25, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, accumulated_weight_raw="4", score_raw="48", first_block=None, height=9, 
validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=26, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, accumulated_weight_raw="4", score_raw="52", first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=27, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', 
'16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, accumulated_weight_raw="4", score_raw="52", first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=28, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, accumulated_weight_raw="4", score_raw="56", first_block=None, height=11, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=29, timestamp=1578878910.25, 
type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, accumulated_weight_raw="4", score_raw="56", first_block=None, height=11, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new tx (below), and one VERTEX_METADATA_CHANGED for a block, adding the new tx as spending their output # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=29, timestamp=1578878970.5, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.656776158409354, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', 
address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.656777477108076, score=0.0, accumulated_weight_raw="413285", score_raw="0", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=30, timestamp=1578878970.5, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[SpentOutput(index=0, tx_ids=['5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650'])], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], 
accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=30, timestamp=1578878970.5, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.656776158409354, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.656777477108076, score=0.0, accumulated_weight_raw="413285", score_raw="0", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=31, timestamp=1578878970.5, type=EventType.VERTEX_METADATA_CHANGED, 
data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[SpentOutput(index=0, tx_ids=['5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650'])], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new tx - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=31, timestamp=1578878970.5, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.656776158409354, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', 
address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.656777477108076, score=0.0, accumulated_weight_raw="413285", score_raw="0", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=32, timestamp=1578878970.5, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.656776158409354, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], 
tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.656777477108076, score=0.0, accumulated_weight_raw="413285", score_raw="0", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new tx (below), and one VERTEX_METADATA_CHANGED for a tx, adding the new tx as spending their output and children # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=32, timestamp=1578879030.75, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=18.4904519466213, inputs=[TxInput(tx_id='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', index=0, spent_output=TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)))], outputs=[TxOutput(value=3400, token_data=0, script='dqkUmkey79Rbhjq4BtHYCm2mT8hDprWIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLatLcoaATFMqECb5fD5rdW2nF9WGyw9os', timelock=None)), TxOutput(value=2000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], 
accumulated_weight=18.49045136082641, score=0.0, accumulated_weight_raw="368282", score_raw="0", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=33, timestamp=1578879030.75, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.656776158409354, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', spent_outputs=[SpentOutput(index=0, tx_ids=['d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6'])], conflict_with=[], voided_by=[], received_by=[], children=['d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6'], twins=[], accumulated_weight=18.656777477108076, score=0.0, accumulated_weight_raw="413285", score_raw="0", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: 
E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=33, timestamp=1578879030.75, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=18.4904519466213, inputs=[TxInput(tx_id='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', index=0, spent_output=TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)))], outputs=[TxOutput(value=3400, token_data=0, script='dqkUmkey79Rbhjq4BtHYCm2mT8hDprWIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLatLcoaATFMqECb5fD5rdW2nF9WGyw9os', timelock=None)), TxOutput(value=2000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.49045136082641, score=0.0, accumulated_weight_raw="368282", score_raw="0", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=34, timestamp=1578879030.75, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.656776158409354, 
inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', spent_outputs=[SpentOutput(index=0, tx_ids=['d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6'])], conflict_with=[], voided_by=[], received_by=[], children=['d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6'], twins=[], accumulated_weight=18.656777477108076, score=0.0, accumulated_weight_raw="413285", score_raw="0", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new tx - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=34, timestamp=1578879030.75, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=18.4904519466213, inputs=[TxInput(tx_id='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', index=0, spent_output=TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', 
decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)))], outputs=[TxOutput(value=3400, token_data=0, script='dqkUmkey79Rbhjq4BtHYCm2mT8hDprWIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLatLcoaATFMqECb5fD5rdW2nF9WGyw9os', timelock=None)), TxOutput(value=2000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.49045136082641, score=0.0, accumulated_weight_raw="368282", score_raw="0", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=35, timestamp=1578879030.75, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=18.4904519466213, inputs=[TxInput(tx_id='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', index=0, spent_output=TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)))], outputs=[TxOutput(value=3400, token_data=0, script='dqkUmkey79Rbhjq4BtHYCm2mT8hDprWIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLatLcoaATFMqECb5fD5rdW2nF9WGyw9os', timelock=None)), TxOutput(value=2000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', 
decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.49045136082641, score=0.0, accumulated_weight_raw="368282", score_raw="0", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each confirmed transaction (first block changed) # noqa E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=35, timestamp=1578879091.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUTisHvpM4sDeINzxF5auK/8bP6UaIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HDeSe6qKqjSLwtnjLBV84NddtZQyNb9HUU', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', 'd2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', '5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=19.576585834390443, accumulated_weight_raw="256", score_raw="781879", first_block=None, height=12, validation='full'), aux_pow=None), 
group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=36, timestamp=1578879091.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=18.4904519466213, inputs=[TxInput(tx_id='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', index=0, spent_output=TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)))], outputs=[TxOutput(value=3400, token_data=0, script='dqkUmkey79Rbhjq4BtHYCm2mT8hDprWIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLatLcoaATFMqECb5fD5rdW2nF9WGyw9os', timelock=None)), TxOutput(value=2000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9'], twins=[], accumulated_weight=18.49045136082641, score=0.0, accumulated_weight_raw="368282", score_raw="0", first_block='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=37, timestamp=1578879091.0, type=EventType.VERTEX_METADATA_CHANGED, 
data=TxData(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.656776158409354, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', spent_outputs=[SpentOutput(index=0, tx_ids=['d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6'])], conflict_with=[], voided_by=[], received_by=[], children=['d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', '7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9'], twins=[], accumulated_weight=18.656777477108076, score=0.0, accumulated_weight_raw="413285", score_raw="0", first_block='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=36, timestamp=1578879091.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', nonce=0, 
timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUTisHvpM4sDeINzxF5auK/8bP6UaIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HDeSe6qKqjSLwtnjLBV84NddtZQyNb9HUU', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', 'd2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', '5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=19.576585834390443, accumulated_weight_raw="256", score_raw="781879", first_block=None, height=12, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=37, timestamp=1578879091.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=18.4904519466213, inputs=[TxInput(tx_id='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', index=0, spent_output=TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)))], outputs=[TxOutput(value=3400, token_data=0, script='dqkUmkey79Rbhjq4BtHYCm2mT8hDprWIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLatLcoaATFMqECb5fD5rdW2nF9WGyw9os', timelock=None)), TxOutput(value=2000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], 
parents=['5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9'], twins=[], accumulated_weight=18.49045136082641, score=0.0, accumulated_weight_raw="368282", score_raw="0", first_block='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=38, timestamp=1578879091.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.656776158409354, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, 
metadata=TxMetadata(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', spent_outputs=[SpentOutput(index=0, tx_ids=['d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6'])], conflict_with=[], voided_by=[], received_by=[], children=['d2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', '7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9'], twins=[], accumulated_weight=18.656777477108076, score=0.0, accumulated_weight_raw="413285", score_raw="0", first_block='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new block - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=38, timestamp=1578879091.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUTisHvpM4sDeINzxF5auK/8bP6UaIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HDeSe6qKqjSLwtnjLBV84NddtZQyNb9HUU', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', 'd2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', '5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=19.576585834390443, accumulated_weight_raw="256", score_raw="781879", first_block=None, height=12, validation='full'), aux_pow=None), group_id=None), latest_event_id=38, stream_id=stream_id) # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, 
network='unittests', event=BaseEvent(id=39, timestamp=1578879091.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUTisHvpM4sDeINzxF5auK/8bP6UaIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HDeSe6qKqjSLwtnjLBV84NddtZQyNb9HUU', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', 'd2bd5f83fcbfa5dee2b602ddc18ebd4f7714e1ecf928824f862efb0559dcb4d6', '5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='7c7449a44a6adf26fb9b68f8c2b7751905c788b417946c43b8a999d0b66f76d9', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=19.576585834390443, accumulated_weight_raw="256", score_raw="781879", first_block=None, height=12, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id) # noqa: E501 ] self.assert_response_equal(responses, expected) @@ -186,39 +190,40 @@ def test_reorg(self) -> None: expected = [ # LOAD_STATED - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=0, timestamp=1578878880.0, type=EventType.LOAD_STARTED, data=EmptyData(), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=0, timestamp=1578878880.0, type=EventType.LOAD_STARTED, data=EmptyData(), group_id=None), latest_event_id=21, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for each genesis (1 block and 2 txs) - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, 
data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, 
metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=21, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, 
accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=21, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=21, stream_id=stream_id), # noqa: E501 # LOAD_FINISHED - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=4, timestamp=1578878880.0, type=EventType.LOAD_FINISHED, data=EmptyData(), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=4, timestamp=1578878880.0, type=EventType.LOAD_FINISHED, data=EmptyData(), group_id=None), latest_event_id=21, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each genesis tx (2), adding the new block as their child # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, timestamp=1578878940.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', nonce=0, timestamp=1578878940, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, 
script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=6, timestamp=1578878940.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, timestamp=1578878940.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, 
signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, timestamp=1578878940.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', nonce=0, timestamp=1578878940, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=21, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=6, timestamp=1578878940.25, type=EventType.VERTEX_METADATA_CHANGED, 
data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=21, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, timestamp=1578878940.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=21, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, 
token_symbol=None, metadata=TxMetadata(hash='', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[''], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=21, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new block from manager1 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, timestamp=1578878940.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', nonce=0, timestamp=1578878940, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=9, timestamp=1578878940.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', nonce=0, timestamp=1578878940, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', 
decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=21, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each genesis tx (2), adding the new block as their child # noqa: E501 # Also one VERTEX_METADATA_CHANGED for the previous block, voiding it - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=9, timestamp=1578879064.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', nonce=0, timestamp=1578879000, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUfBo1MGBHkHtXDktO+BxtBdh5T5GIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HHqKa5Y6viZ8fkH2bd1qQBdsZnrtsmruqS', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', spent_outputs=[], conflict_with=[], voided_by=['1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533'], received_by=[], 
children=[], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=10, timestamp=1578879064.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', nonce=0, timestamp=1578878940, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', spent_outputs=[], conflict_with=[], voided_by=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a'], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=11, timestamp=1578879064.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], 
conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', '1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=12, timestamp=1578879064.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', '1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=10, timestamp=1578879064.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', nonce=0, timestamp=1578879000, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUfBo1MGBHkHtXDktO+BxtBdh5T5GIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HHqKa5Y6viZ8fkH2bd1qQBdsZnrtsmruqS', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', 
'16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', spent_outputs=[], conflict_with=[], voided_by=['1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533'], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=21, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=11, timestamp=1578879064.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', nonce=0, timestamp=1578878940, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', spent_outputs=[], conflict_with=[], voided_by=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a'], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=21, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=12, 
timestamp=1578879064.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', '1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=21, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=13, timestamp=1578879064.0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', '1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=21, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new block from manager2 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=13, timestamp=1578879064.0, type=EventType.NEW_VERTEX_ACCEPTED, 
data=TxData(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', nonce=0, timestamp=1578879000, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUfBo1MGBHkHtXDktO+BxtBdh5T5GIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HHqKa5Y6viZ8fkH2bd1qQBdsZnrtsmruqS', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', spent_outputs=[], conflict_with=[], voided_by=['1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533'], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=14, timestamp=1578879064.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', nonce=0, timestamp=1578879000, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUfBo1MGBHkHtXDktO+BxtBdh5T5GIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HHqKa5Y6viZ8fkH2bd1qQBdsZnrtsmruqS', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', spent_outputs=[], conflict_with=[], 
voided_by=['1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533'], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=21, stream_id=stream_id), # noqa: E501 # REORG_STARTED caused by a new block from manager2 (below) - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=14, timestamp=1578879064.25, type=EventType.REORG_STARTED, data=ReorgData(reorg_size=1, previous_best_block='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', new_best_block='38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1', common_block='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792'), group_id=0), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=15, timestamp=1578879064.25, type=EventType.REORG_STARTED, data=ReorgData(reorg_size=1, previous_best_block='82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', new_best_block='38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1', common_block='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792'), group_id=0), latest_event_id=21, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each genesis tx (2), adding the new block as their child # noqa: E501 # Also one VERTEX_METADATA_CHANGED for the previous block, un-voiding it as it's now part of the best blockchain # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=15, timestamp=1578879064.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1', nonce=0, timestamp=1578879001, signal_bits=0, version=0, weight=2.0, 
inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUgQrqLefPfPVpkXlfvvAp943epyOIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HJHSdTickduA1MF9PTbzBQi6Z7stNAzwAu', timelock=None))], parents=['1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.321928094887363, accumulated_weight_raw="4", score_raw="20", first_block=None, height=2, validation='full'), aux_pow=None), group_id=0), latest_event_id=20, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=16, timestamp=1578879064.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', nonce=0, timestamp=1578879000, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUfBo1MGBHkHtXDktO+BxtBdh5T5GIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HHqKa5Y6viZ8fkH2bd1qQBdsZnrtsmruqS', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, 
height=1, validation='full'), aux_pow=None), group_id=0), latest_event_id=20, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=17, timestamp=1578879064.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', '1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', '38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', height=0, validation='full'), aux_pow=None), group_id=0), latest_event_id=20, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=18, timestamp=1578879064.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', '1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', '38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1'], twins=[], accumulated_weight=2.0, score=2.0, 
accumulated_weight_raw="4", score_raw="4", first_block='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', height=0, validation='full'), aux_pow=None), group_id=0), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=16, timestamp=1578879064.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1', nonce=0, timestamp=1578879001, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUgQrqLefPfPVpkXlfvvAp943epyOIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HJHSdTickduA1MF9PTbzBQi6Z7stNAzwAu', timelock=None))], parents=['1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.321928094887363, accumulated_weight_raw="4", score_raw="20", first_block=None, height=2, validation='full'), aux_pow=None), group_id=0), latest_event_id=21, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=17, timestamp=1578879064.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', nonce=0, timestamp=1578879000, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUfBo1MGBHkHtXDktO+BxtBdh5T5GIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HHqKa5Y6viZ8fkH2bd1qQBdsZnrtsmruqS', timelock=None))], 
parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=0), latest_event_id=21, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=18, timestamp=1578879064.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', '1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', '38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', height=0, validation='full'), aux_pow=None), group_id=0), latest_event_id=21, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=19, timestamp=1578879064.25, type=EventType.VERTEX_METADATA_CHANGED, 
data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['82afedcd590f7ad34d09475fc1dfd00e5a0f8ad6b70508ca4659351709c90f9a', '1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', '38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', height=0, validation='full'), aux_pow=None), group_id=0), latest_event_id=21, stream_id=stream_id), # noqa: E501 # REORG_FINISHED - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=19, timestamp=1578879064.25, type=EventType.REORG_FINISHED, data=EmptyData(), group_id=0), latest_event_id=20, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=20, timestamp=1578879064.25, type=EventType.REORG_FINISHED, data=EmptyData(), group_id=0), latest_event_id=21, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new block from manager2 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=20, timestamp=1578879064.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1', nonce=0, timestamp=1578879001, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUgQrqLefPfPVpkXlfvvAp943epyOIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HJHSdTickduA1MF9PTbzBQi6Z7stNAzwAu', timelock=None))], 
parents=['1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.321928094887363, accumulated_weight_raw="4", score_raw="20", first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=20, stream_id=stream_id) # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=21, timestamp=1578879064.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1', nonce=0, timestamp=1578879001, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUgQrqLefPfPVpkXlfvvAp943epyOIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HJHSdTickduA1MF9PTbzBQi6Z7stNAzwAu', timelock=None))], parents=['1204b8c30f0236ae6f1841d0c4805a47089c4d5e3ccd0dcab8aa65f0e4991533', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='38e7f91420ae78ae01707f80c29abe692beebf9d5575cc7c9248e9bdc78169c1', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=4.321928094887363, accumulated_weight_raw="4", score_raw="20", first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=21, stream_id=stream_id) # noqa: E501 ] self.assert_response_equal(responses, expected) @@ -233,63 +238,66 @@ def test_unvoided_transaction(self) -> None: expected = [ # LOAD_STATED - 
EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=0, type=EventType.LOAD_STARTED, timestamp=0, data=EmptyData(), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=0, type=EventType.LOAD_STARTED, timestamp=0, data=EmptyData(), group_id=None), latest_event_id=40, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for each genesis (1 block and 2 txs) - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], 
received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=40, stream_id=stream_id), # noqa: E501 + 
EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=40, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=40, stream_id=stream_id), # noqa: E501 # LOAD_FINISHED - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=4, type=EventType.LOAD_FINISHED, timestamp=0, data=EmptyData(), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=4, type=EventType.LOAD_FINISHED, timestamp=0, data=EmptyData(), group_id=None), latest_event_id=40, 
stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each genesis tx (2), adding the new block as their child # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=6, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], 
children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', 
'97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=40, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', 
event=BaseEvent(id=6, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=40, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', 
spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=40, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[''], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=40, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new block - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, 
data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=9, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], 
received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=40, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED and one NEW_VERTEX_ACCEPTED for 10 new blocks - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=9, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, accumulated_weight_raw="4", score_raw="20", first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=10, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, 
script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, accumulated_weight_raw="4", score_raw="20", first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=11, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, accumulated_weight_raw="4", score_raw="24", 
first_block=None, height=3, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=12, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, accumulated_weight_raw="4", score_raw="24", first_block=None, height=3, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=13, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', 
'16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, accumulated_weight_raw="4", score_raw="28", first_block=None, height=4, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=14, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, accumulated_weight_raw="4", score_raw="28", first_block=None, height=4, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=15, 
type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, accumulated_weight_raw="4", score_raw="32", first_block=None, height=5, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=16, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', 
spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, accumulated_weight_raw="4", score_raw="32", first_block=None, height=5, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=17, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, accumulated_weight_raw="4", score_raw="36", first_block=None, height=6, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=18, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, 
script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, accumulated_weight_raw="4", score_raw="36", first_block=None, height=6, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=19, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, accumulated_weight_raw="4", score_raw="40", 
first_block=None, height=7, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=20, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, accumulated_weight_raw="4", score_raw="40", first_block=None, height=7, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=21, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 
'16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, accumulated_weight_raw="4", score_raw="44", first_block=None, height=8, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=22, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, accumulated_weight_raw="4", score_raw="44", first_block=None, height=8, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=23, 
type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, accumulated_weight_raw="4", score_raw="48", first_block=None, height=9, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=24, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, 
metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, accumulated_weight_raw="4", score_raw="48", first_block=None, height=9, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=25, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, accumulated_weight_raw="4", score_raw="52", first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=26, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, signal_bits=0, 
version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, accumulated_weight_raw="4", score_raw="52", first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=27, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, accumulated_weight_raw="4", 
score_raw="56", first_block=None, height=11, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=28, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, accumulated_weight_raw="4", score_raw="56", first_block=None, height=11, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=10, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', 
'33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, accumulated_weight_raw="4", score_raw="20", first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=40, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=11, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, accumulated_weight_raw="4", score_raw="20", first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=40, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=12, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, 
data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, accumulated_weight_raw="4", score_raw="24", first_block=None, height=3, validation='full'), aux_pow=None), group_id=None), latest_event_id=40, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=13, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], 
voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, accumulated_weight_raw="4", score_raw="24", first_block=None, height=3, validation='full'), aux_pow=None), group_id=None), latest_event_id=40, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=14, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, accumulated_weight_raw="4", score_raw="28", first_block=None, height=4, validation='full'), aux_pow=None), group_id=None), latest_event_id=40, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=15, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', 
decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, accumulated_weight_raw="4", score_raw="28", first_block=None, height=4, validation='full'), aux_pow=None), group_id=None), latest_event_id=40, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=16, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, accumulated_weight_raw="4", score_raw="32", first_block=None, height=5, validation='full'), aux_pow=None), 
group_id=None), latest_event_id=40, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=17, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, accumulated_weight_raw="4", score_raw="32", first_block=None, height=5, validation='full'), aux_pow=None), group_id=None), latest_event_id=40, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=18, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', 
'33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, accumulated_weight_raw="4", score_raw="36", first_block=None, height=6, validation='full'), aux_pow=None), group_id=None), latest_event_id=40, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=19, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, accumulated_weight_raw="4", score_raw="36", first_block=None, height=6, validation='full'), aux_pow=None), group_id=None), latest_event_id=40, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=20, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, 
data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, accumulated_weight_raw="4", score_raw="40", first_block=None, height=7, validation='full'), aux_pow=None), group_id=None), latest_event_id=40, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=21, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], 
voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, accumulated_weight_raw="4", score_raw="40", first_block=None, height=7, validation='full'), aux_pow=None), group_id=None), latest_event_id=40, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=22, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, accumulated_weight_raw="4", score_raw="44", first_block=None, height=8, validation='full'), aux_pow=None), group_id=None), latest_event_id=40, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=23, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', 
decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, accumulated_weight_raw="4", score_raw="44", first_block=None, height=8, validation='full'), aux_pow=None), group_id=None), latest_event_id=40, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=24, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, accumulated_weight_raw="4", score_raw="48", first_block=None, height=9, validation='full'), 
aux_pow=None), group_id=None), latest_event_id=40, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=25, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, accumulated_weight_raw="4", score_raw="48", first_block=None, height=9, validation='full'), aux_pow=None), group_id=None), latest_event_id=40, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=26, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', 
'33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, accumulated_weight_raw="4", score_raw="52", first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=40, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=27, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, accumulated_weight_raw="4", score_raw="52", first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=40, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=28, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, 
data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, accumulated_weight_raw="4", score_raw="56", first_block=None, height=11, validation='full'), aux_pow=None), group_id=None), latest_event_id=40, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=29, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, 
score=5.807354922057604, accumulated_weight_raw="4", score_raw="56", first_block=None, height=11, validation='full'), aux_pow=None), group_id=None), latest_event_id=40, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new tx (below), and one VERTEX_METADATA_CHANGED for a block, adding the new tx as spending their output # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=29, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=19.0005, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=19.00050072657387, score=0.0, accumulated_weight_raw="524470", score_raw="0", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, 
network='unittests', event=BaseEvent(id=30, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[SpentOutput(index=0, tx_ids=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'])], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=30, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=19.0005, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', 
decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=19.00050072657387, score=0.0, accumulated_weight_raw="524470", score_raw="0", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=40, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=31, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[SpentOutput(index=0, tx_ids=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'])], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], 
twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=40, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new tx - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=31, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=19.0005, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=19.00050072657387, score=0.0, accumulated_weight_raw="524470", score_raw="0", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=32, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, 
data=TxData(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=19.0005, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=19.00050072657387, score=0.0, accumulated_weight_raw="524470", score_raw="0", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=40, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new tx (below), one VERTEX_METADATA_CHANGED for a block, adding the new tx as spending their output, and one VERTEX_METADATA_CHANGED adding the new tx as twin/conflict of the previous tx # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=32, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=19.0, 
inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', spent_outputs=[], conflict_with=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], voided_by=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], received_by=[], children=[], twins=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], accumulated_weight=19.0, score=0.0, accumulated_weight_raw="524288", score_raw="0", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=33, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=19.0005, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', 
decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', spent_outputs=[], conflict_with=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], voided_by=[], received_by=[], children=[], twins=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], accumulated_weight=19.00050072657387, score=0.0, accumulated_weight_raw="524470", score_raw="0", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=34, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, 
metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[SpentOutput(index=0, tx_ids=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', '0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'])], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=33, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=19.0, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', spent_outputs=[], conflict_with=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], 
voided_by=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], received_by=[], children=[], twins=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], accumulated_weight=19.0, score=0.0, accumulated_weight_raw="524288", score_raw="0", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=40, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=34, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=19.0005, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', spent_outputs=[], conflict_with=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], voided_by=[], received_by=[], children=[], twins=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], accumulated_weight=19.00050072657387, score=0.0, accumulated_weight_raw="524470", score_raw="0", 
first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=40, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=35, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[SpentOutput(index=0, tx_ids=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', '0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'])], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=40, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new tx that is a twin of the previous one. It's voided. 
- EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=35, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=19.0, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', spent_outputs=[], conflict_with=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], voided_by=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], received_by=[], children=[], twins=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], accumulated_weight=19.0, score=0.0, accumulated_weight_raw="524288", score_raw="0", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=36, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', 
nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=19.0, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', spent_outputs=[], conflict_with=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], voided_by=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], received_by=[], children=[], twins=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], accumulated_weight=19.0, score=0.0, accumulated_weight_raw="524288", score_raw="0", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=40, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each twin tx, inverting the voided state of them. # noqa E501 # The order of events is important, we receive the voided txs first, then reverse topological ordering. 
- EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=36, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=19.0005, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', spent_outputs=[], conflict_with=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], voided_by=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], received_by=[], children=[], twins=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], accumulated_weight=19.00050072657387, score=0.0, accumulated_weight_raw="524470", score_raw="0", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=37, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, 
data=TxData(hash='24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H8XUjiUx24WLXUN63da34hX6bEs29GJjSs', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=19.000858282039708, accumulated_weight_raw="256", score_raw="524600", first_block=None, height=12, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=38, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=19.0, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', 
timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', spent_outputs=[], conflict_with=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], voided_by=[], received_by=[], children=['24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a'], twins=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], accumulated_weight=19.000704269011248, score=0.0, accumulated_weight_raw="524544", score_raw="0", first_block='24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=37, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=19.0005, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', 
'33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4', spent_outputs=[], conflict_with=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], voided_by=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], received_by=[], children=[], twins=['0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], accumulated_weight=19.00050072657387, score=0.0, accumulated_weight_raw="524470", score_raw="0", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=40, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=38, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H8XUjiUx24WLXUN63da34hX6bEs29GJjSs', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=19.000858282039708, accumulated_weight_raw="256", score_raw="524600", first_block=None, height=12, validation='full'), aux_pow=None), group_id=None), latest_event_id=40, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', 
event=BaseEvent(id=39, type=EventType.VERTEX_METADATA_CHANGED, timestamp=0, data=TxData(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=19.0, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88', spent_outputs=[], conflict_with=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], voided_by=[], received_by=[], children=['24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a'], twins=['cba55aadc9fd8d5bdb6f394d8f5eb00cc775db12c2512c9e37df8e31ca3841f4'], accumulated_weight=19.000704269011248, score=0.0, accumulated_weight_raw="524544", score_raw="0", first_block='24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=40, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new block - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=39, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, 
data=TxData(hash='24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H8XUjiUx24WLXUN63da34hX6bEs29GJjSs', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=19.000858282039708, accumulated_weight_raw="256", score_raw="524600", first_block=None, height=12, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=40, type=EventType.NEW_VERTEX_ACCEPTED, timestamp=0, data=TxData(hash='24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H8XUjiUx24WLXUN63da34hX6bEs29GJjSs', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '0639e93ff22647ed06af3ac3a3bc7dd2ca8db18c67fdd9a039318b4d6bf51a88'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='24707288e7c72c5e74c68241ee32d64239902533e64946de6e6cddb66ef3432a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], 
accumulated_weight=8.0, score=19.000858282039708, accumulated_weight_raw="256", score_raw="524600", first_block=None, height=12, validation='full'), aux_pow=None), group_id=None), latest_event_id=40, stream_id=stream_id), # noqa: E501 ] self.assert_response_equal(responses, expected) def test_invalid_mempool(self) -> None: + # This test implementation is depending on 10 blocks, improve this after refactor to dag builder + self._prepare(reward_spend_min_blocks=10) stream_id = self.manager._event_manager._stream_id assert stream_id is not None Scenario.INVALID_MEMPOOL_TRANSACTION.simulate(self.simulator, self.manager) @@ -299,63 +307,64 @@ def test_invalid_mempool(self) -> None: expected = [ # LOAD_STATED - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=0, timestamp=1578878880.0, type=EventType.LOAD_STARTED, data=EmptyData(), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=0, timestamp=1578878880.0, type=EventType.LOAD_STARTED, data=EmptyData(), group_id=None), latest_event_id=42, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for each genesis (1 block and 2 txs) - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], 
children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, 
data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=42, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=42, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, timestamp=1578878880.0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, 
metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=42, stream_id=stream_id), # noqa: E501 # LOAD_FINISHED - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=4, timestamp=1578878880.0, type=EventType.LOAD_FINISHED, data=EmptyData(), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=4, timestamp=1578878880.0, type=EventType.LOAD_FINISHED, data=EmptyData(), group_id=None), latest_event_id=42, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each genesis tx (2), adding the new block as their child # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], 
children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=6, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, 
timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', 
address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=42, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=6, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', 
'1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=42, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=42, 
stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[''], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=42, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new block - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 + 
EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=9, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=42, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED and one NEW_VERTEX_ACCEPTED for 10 new blocks - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=9, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', 
'33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, accumulated_weight_raw="4", score_raw="20", first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=10, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, accumulated_weight_raw="4", score_raw="20", first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=11, timestamp=1578878910.25, 
type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, accumulated_weight_raw="4", score_raw="24", first_block=None, height=3, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=12, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, 
metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, accumulated_weight_raw="4", score_raw="24", first_block=None, height=3, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=13, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, accumulated_weight_raw="4", score_raw="28", first_block=None, height=4, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=14, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, 
timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, accumulated_weight_raw="4", score_raw="28", first_block=None, height=4, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=15, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], 
children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, accumulated_weight_raw="4", score_raw="32", first_block=None, height=5, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=16, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, accumulated_weight_raw="4", score_raw="32", first_block=None, height=5, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=17, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', 
address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, accumulated_weight_raw="4", score_raw="36", first_block=None, height=6, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=18, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, accumulated_weight_raw="4", score_raw="36", first_block=None, height=6, validation='full'), aux_pow=None), group_id=None), 
latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=19, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, accumulated_weight_raw="4", score_raw="40", first_block=None, height=7, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=20, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', 
'33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, accumulated_weight_raw="4", score_raw="40", first_block=None, height=7, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=21, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, accumulated_weight_raw="4", score_raw="44", first_block=None, height=8, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=22, timestamp=1578878910.25, 
type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, accumulated_weight_raw="4", score_raw="44", first_block=None, height=8, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=23, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, 
metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, accumulated_weight_raw="4", score_raw="48", first_block=None, height=9, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=24, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, accumulated_weight_raw="4", score_raw="48", first_block=None, height=9, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=25, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, 
timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, accumulated_weight_raw="4", score_raw="52", first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=26, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], 
children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, accumulated_weight_raw="4", score_raw="52", first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=27, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, accumulated_weight_raw="4", score_raw="56", first_block=None, height=11, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=28, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', 
address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, accumulated_weight_raw="4", score_raw="56", first_block=None, height=11, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=10, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, accumulated_weight_raw="4", score_raw="20", first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=42, stream_id=stream_id), # noqa: E501 + 
EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=11, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, accumulated_weight_raw="4", score_raw="20", first_block=None, height=2, validation='full'), aux_pow=None), group_id=None), latest_event_id=42, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=12, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], 
token_name=None, token_symbol=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, accumulated_weight_raw="4", score_raw="24", first_block=None, height=3, validation='full'), aux_pow=None), group_id=None), latest_event_id=42, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=13, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, accumulated_weight_raw="4", score_raw="24", first_block=None, height=3, validation='full'), aux_pow=None), group_id=None), latest_event_id=42, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=14, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, 
data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, accumulated_weight_raw="4", score_raw="28", first_block=None, height=4, validation='full'), aux_pow=None), group_id=None), latest_event_id=42, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=15, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], 
voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, accumulated_weight_raw="4", score_raw="28", first_block=None, height=4, validation='full'), aux_pow=None), group_id=None), latest_event_id=42, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=16, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, accumulated_weight_raw="4", score_raw="32", first_block=None, height=5, validation='full'), aux_pow=None), group_id=None), latest_event_id=42, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=17, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', 
decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, accumulated_weight_raw="4", score_raw="32", first_block=None, height=5, validation='full'), aux_pow=None), group_id=None), latest_event_id=42, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=18, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, accumulated_weight_raw="4", score_raw="36", first_block=None, height=6, validation='full'), 
aux_pow=None), group_id=None), latest_event_id=42, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=19, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, accumulated_weight_raw="4", score_raw="36", first_block=None, height=6, validation='full'), aux_pow=None), group_id=None), latest_event_id=42, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=20, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', 
'16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, accumulated_weight_raw="4", score_raw="40", first_block=None, height=7, validation='full'), aux_pow=None), group_id=None), latest_event_id=42, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=21, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, accumulated_weight_raw="4", score_raw="40", first_block=None, height=7, validation='full'), aux_pow=None), group_id=None), latest_event_id=42, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', 
event=BaseEvent(id=22, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, accumulated_weight_raw="4", score_raw="44", first_block=None, height=8, validation='full'), aux_pow=None), group_id=None), latest_event_id=42, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=23, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, 
metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, accumulated_weight_raw="4", score_raw="44", first_block=None, height=8, validation='full'), aux_pow=None), group_id=None), latest_event_id=42, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=24, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, accumulated_weight_raw="4", score_raw="48", first_block=None, height=9, validation='full'), aux_pow=None), group_id=None), latest_event_id=42, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=25, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, 
timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, accumulated_weight_raw="4", score_raw="48", first_block=None, height=9, validation='full'), aux_pow=None), group_id=None), latest_event_id=42, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=26, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], 
children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, accumulated_weight_raw="4", score_raw="52", first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=42, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=27, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, accumulated_weight_raw="4", score_raw="52", first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=42, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=28, timestamp=1578878910.25, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', 
decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, accumulated_weight_raw="4", score_raw="56", first_block=None, height=11, validation='full'), aux_pow=None), group_id=None), latest_event_id=42, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=29, timestamp=1578878910.25, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, accumulated_weight_raw="4", score_raw="56", first_block=None, height=11, validation='full'), aux_pow=None), group_id=None), latest_event_id=42, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new 
tx (below), and one VERTEX_METADATA_CHANGED for a block, adding the new tx as spending their output # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=29, timestamp=1578878970.5, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.656776158409354, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.656777477108076, score=0.0, accumulated_weight_raw="413285", score_raw="0", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=30, timestamp=1578878970.5, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, 
signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[SpentOutput(index=0, tx_ids=['5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650'])], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=30, timestamp=1578878970.5, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.656776158409354, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', 
address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.656777477108076, score=0.0, accumulated_weight_raw="413285", score_raw="0", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=42, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=31, timestamp=1578878970.5, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[SpentOutput(index=0, tx_ids=['5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650'])], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=None), latest_event_id=42, stream_id=stream_id), # 
noqa: E501 # One NEW_VERTEX_ACCEPTED for a new tx - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=31, timestamp=1578878970.5, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.656776158409354, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.656777477108076, score=0.0, accumulated_weight_raw="413285", score_raw="0", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=32, timestamp=1578878970.5, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.656776158409354, 
inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.656777477108076, score=0.0, accumulated_weight_raw="413285", score_raw="0", first_block=None, height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=42, stream_id=stream_id), # noqa: E501 # REORG_STARTED caused by a block with lower height but higher weight (below) - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=32, timestamp=0, type=EventType.REORG_STARTED, data=ReorgData(reorg_size=2, previous_best_block='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', new_best_block='2e3122412eb129c7f0d03e37d8a5637da9354df980a2259332b2b14e7a340d94', common_block='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'), group_id=0), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=33, timestamp=0, type=EventType.REORG_STARTED, 
data=ReorgData(reorg_size=2, previous_best_block='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', new_best_block='2e3122412eb129c7f0d03e37d8a5637da9354df980a2259332b2b14e7a340d94', common_block='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'), group_id=0), latest_event_id=42, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for each block that was voided by the reorg - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=33, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, accumulated_weight_raw="4", score_raw="56", first_block=None, height=11, validation='full'), aux_pow=None), group_id=0), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=34, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], 
outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, accumulated_weight_raw="4", score_raw="52", first_block=None, height=10, validation='full'), aux_pow=None), group_id=0), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=34, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], received_by=[], children=[], 
twins=[], accumulated_weight=2.0, score=5.807354922057604, accumulated_weight_raw="4", score_raw="56", first_block=None, height=11, validation='full'), aux_pow=None), group_id=0), latest_event_id=42, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=35, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, accumulated_weight_raw="4", score_raw="52", first_block=None, height=10, validation='full'), aux_pow=None), group_id=0), latest_event_id=42, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for the new block - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=35, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='2e3122412eb129c7f0d03e37d8a5637da9354df980a2259332b2b14e7a340d94', nonce=0, timestamp=1578879030, signal_bits=0, version=0, weight=10.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='', decoded=None)], 
parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='2e3122412eb129c7f0d03e37d8a5637da9354df980a2259332b2b14e7a340d94', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=10.0, score=10.066089190457772, accumulated_weight_raw="1024", score_raw="1072", first_block=None, height=10, validation='full'), aux_pow=None), group_id=0), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=36, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='2e3122412eb129c7f0d03e37d8a5637da9354df980a2259332b2b14e7a340d94', nonce=0, timestamp=1578879030, signal_bits=0, version=0, weight=10.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='', decoded={})], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='2e3122412eb129c7f0d03e37d8a5637da9354df980a2259332b2b14e7a340d94', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=10.0, score=10.066089190457772, accumulated_weight_raw="1024", score_raw="1072", first_block=None, height=10, validation='full'), aux_pow=None), group_id=0), latest_event_id=42, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for the block that had its output unspent, since the previous tx was removed - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=36, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, 
data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[SpentOutput(index=0, tx_ids=[])], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=0), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=37, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', 
spent_outputs=[SpentOutput(index=0, tx_ids=[])], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full'), aux_pow=None), group_id=0), latest_event_id=42, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for each parent of the tx that was removed - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=37, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', '2e3122412eb129c7f0d03e37d8a5637da9354df980a2259332b2b14e7a340d94'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", 
first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=0), latest_event_id=41, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=38, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', '2e3122412eb129c7f0d03e37d8a5637da9354df980a2259332b2b14e7a340d94'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=0), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=38, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, 
data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', '2e3122412eb129c7f0d03e37d8a5637da9354df980a2259332b2b14e7a340d94'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=0), latest_event_id=42, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=39, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], 
conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', '2e3122412eb129c7f0d03e37d8a5637da9354df980a2259332b2b14e7a340d94'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full'), aux_pow=None), group_id=0), latest_event_id=42, stream_id=stream_id), # noqa: E501 # One VERTEX_REMOVED for the tx above - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=39, timestamp=0, type=EventType.VERTEX_REMOVED, data=TxDataWithoutMeta(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.656776158409354, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', 
timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None), group_id=0), latest_event_id=41, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=40, timestamp=0, type=EventType.VERTEX_REMOVED, data=TxDataWithoutMeta(hash='5453759e15a6413a06390868cbb56509704c6f3f7d25f443556d8d6b2dacc650', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.656776158409354, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None), group_id=0), latest_event_id=42, stream_id=stream_id), # noqa: E501 # REORG_FINISHED - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=40, timestamp=0, type=EventType.REORG_FINISHED, data=EmptyData(), group_id=0), latest_event_id=41, stream_id=stream_id), # noqa: E501 + 
EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=41, timestamp=0, type=EventType.REORG_FINISHED, data=EmptyData(), group_id=0), latest_event_id=42, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for the block that caused the reorg - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=41, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='2e3122412eb129c7f0d03e37d8a5637da9354df980a2259332b2b14e7a340d94', nonce=0, timestamp=1578879030, signal_bits=0, version=0, weight=10.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='', decoded=None)], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='2e3122412eb129c7f0d03e37d8a5637da9354df980a2259332b2b14e7a340d94', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=10.0, score=10.066089190457772, accumulated_weight_raw="1024", score_raw="1072", first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=41, stream_id=stream_id) # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=42, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='2e3122412eb129c7f0d03e37d8a5637da9354df980a2259332b2b14e7a340d94', nonce=0, timestamp=1578879030, signal_bits=0, version=0, weight=10.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='', decoded={})], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, 
metadata=TxMetadata(hash='2e3122412eb129c7f0d03e37d8a5637da9354df980a2259332b2b14e7a340d94', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=10.0, score=10.066089190457772, accumulated_weight_raw="1024", score_raw="1072", first_block=None, height=10, validation='full'), aux_pow=None), group_id=None), latest_event_id=42, stream_id=stream_id) # noqa: E501 ] self.assert_response_equal(responses, expected) @@ -370,56 +379,57 @@ def test_empty_script(self) -> None: expected = [ # LOAD_STATED - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=0, timestamp=0, type=EventType.LOAD_STARTED, data=EmptyData(), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=0, timestamp=0, type=EventType.LOAD_STARTED, data=EmptyData(), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for each genesis (1 block and 2 txs) - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, 
stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', 
decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", 
first_block=None, height=0, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # LOAD_FINISHED - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=4, timestamp=0, type=EventType.LOAD_FINISHED, data=EmptyData(), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=4, timestamp=0, type=EventType.LOAD_FINISHED, data=EmptyData(), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each genesis tx (2), adding the new block as their first block # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - 
EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=6, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, 
metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, 
metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=6, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full')), group_id=None), latest_event_id=39, 
stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', 'fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', 'eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', 'f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', height=0, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='', spent_outputs=[], 
conflict_with=[], voided_by=[], received_by=[], children=[''], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new block - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=9, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', 
address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED and one NEW_VERTEX_ACCEPTED for 10 new blocks - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=9, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, accumulated_weight_raw="4", score_raw="20", 
first_block=None, height=2, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=10, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, accumulated_weight_raw="4", score_raw="20", first_block=None, height=2, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=11, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', 
'16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, accumulated_weight_raw="4", score_raw="24", first_block=None, height=3, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=12, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, accumulated_weight_raw="4", score_raw="24", first_block=None, height=3, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=13, 
timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, accumulated_weight_raw="4", score_raw="28", first_block=None, height=4, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=14, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, 
metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, accumulated_weight_raw="4", score_raw="28", first_block=None, height=4, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=15, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, accumulated_weight_raw="4", score_raw="32", first_block=None, height=5, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=16, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], 
outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, accumulated_weight_raw="4", score_raw="32", first_block=None, height=5, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=17, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, 
accumulated_weight_raw="4", score_raw="36", first_block=None, height=6, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=18, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, accumulated_weight_raw="4", score_raw="36", first_block=None, height=6, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=19, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', 
'16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, accumulated_weight_raw="4", score_raw="40", first_block=None, height=7, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=20, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, accumulated_weight_raw="4", score_raw="40", first_block=None, height=7, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=21, 
timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, accumulated_weight_raw="4", score_raw="44", first_block=None, height=8, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=22, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, 
metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, accumulated_weight_raw="4", score_raw="44", first_block=None, height=8, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=23, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, accumulated_weight_raw="4", score_raw="48", first_block=None, height=9, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=24, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, signal_bits=0, version=0, 
weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, accumulated_weight_raw="4", score_raw="48", first_block=None, height=9, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=25, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], 
accumulated_weight=2.0, score=5.700439718141092, accumulated_weight_raw="4", score_raw="52", first_block=None, height=10, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=26, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, accumulated_weight_raw="4", score_raw="52", first_block=None, height=10, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=27, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], 
parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, accumulated_weight_raw="4", score_raw="56", first_block=None, height=11, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=28, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, accumulated_weight_raw="4", score_raw="56", first_block=None, height=11, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=10, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, 
data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, accumulated_weight_raw="4", score_raw="20", first_block=None, height=2, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=11, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', spent_outputs=[], 
conflict_with=[], voided_by=[], received_by=[], children=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8'], twins=[], accumulated_weight=2.0, score=4.321928094887363, accumulated_weight_raw="4", score_raw="20", first_block=None, height=2, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=12, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, accumulated_weight_raw="4", score_raw="24", first_block=None, height=3, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=13, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', 
decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393'], twins=[], accumulated_weight=2.0, score=4.584962500721156, accumulated_weight_raw="4", score_raw="24", first_block=None, height=3, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=14, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, accumulated_weight_raw="4", score_raw="28", first_block=None, height=4, 
validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=15, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['32fea29451e575e9e001f55878f4df61a2f6cf0212c4b9cbfb8125691d5377a8', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49'], twins=[], accumulated_weight=2.0, score=4.807354922057604, accumulated_weight_raw="4", score_raw="28", first_block=None, height=4, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=16, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', 
'33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, accumulated_weight_raw="4", score_raw="32", first_block=None, height=5, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=17, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['896593a8103553e6f54c46901f8c14e62618efe7f18c5afd48cf26e96db9e393', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3'], twins=[], accumulated_weight=2.0, score=5.0, accumulated_weight_raw="4", score_raw="32", first_block=None, height=5, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=18, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, 
data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, accumulated_weight_raw="4", score_raw="36", first_block=None, height=6, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=19, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['0b71c21b8000f05241283a848b99e38f27a94a188def7ef1b93f8b0828caba49', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', spent_outputs=[], 
conflict_with=[], voided_by=[], received_by=[], children=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e'], twins=[], accumulated_weight=2.0, score=5.169925001442312, accumulated_weight_raw="4", score_raw="36", first_block=None, height=6, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=20, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, accumulated_weight_raw="4", score_raw="40", first_block=None, height=7, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=21, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', 
decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['97b711632054189cbeb1ef4707b7d48c84e6af9a0395a4484030fb3202e691e3', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d'], twins=[], accumulated_weight=2.0, score=5.321928094887363, accumulated_weight_raw="4", score_raw="40", first_block=None, height=7, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=22, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, accumulated_weight_raw="4", score_raw="44", first_block=None, height=8, 
validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=23, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['6b5e6201d81381a49fa7febe15f46d440360d8e7b1a0ddbe42e59889f32af56e', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6'], twins=[], accumulated_weight=2.0, score=5.459431618637297, accumulated_weight_raw="4", score_raw="44", first_block=None, height=8, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=24, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', 
'33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, accumulated_weight_raw="4", score_raw="48", first_block=None, height=9, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=25, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['fdc65dbd3675a01a39343dd0c4a05eea471c3bd7015bb96cea0bde7143e24c5d', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7'], twins=[], accumulated_weight=2.0, score=5.584962500721156, accumulated_weight_raw="4", score_raw="48", first_block=None, height=9, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=26, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, 
data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, accumulated_weight_raw="4", score_raw="52", first_block=None, height=10, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=27, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['eb3c4684dfad95a5b9d1c88f3463b91fe44bbe7b00e4b810648ca9e9ff5685a6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', spent_outputs=[], 
conflict_with=[], voided_by=[], received_by=[], children=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb'], twins=[], accumulated_weight=2.0, score=5.700439718141092, accumulated_weight_raw="4", score_raw="52", first_block=None, height=10, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=28, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, accumulated_weight_raw="4", score_raw="56", first_block=None, height=11, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=29, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', 
address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['1eb8f2c848828831c0e50f13b6ea54cac99494031ebad0318c7b142acb5540b7', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, accumulated_weight_raw="4", score_raw="56", first_block=None, height=11, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new tx (below), and one VERTEX_METADATA_CHANGED for a block, adding the new tx as spending their output # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=29, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.449427506558003, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='', decoded=None)], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, 
metadata=TxMetadata(hash='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.44942676691887, score=0.0, accumulated_weight_raw="357957", score_raw="0", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=30, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[SpentOutput(index=0, tx_ids=['ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d'])], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=30, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', nonce=0, timestamp=1578878970, 
signal_bits=0, version=1, weight=18.449427506558003, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='', decoded={})], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.44942676691887, score=0.0, accumulated_weight_raw="357957", score_raw="0", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=31, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, 
metadata=TxMetadata(hash='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', spent_outputs=[SpentOutput(index=0, tx_ids=['ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d'])], conflict_with=[], voided_by=[], received_by=[], children=['8ab45f3b35f8dc437fb4a246d9b7dd3d3d5cfb7270e516076718a7a94598cf2f'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new tx - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=31, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.449427506558003, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='', decoded=None)], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.44942676691887, score=0.0, accumulated_weight_raw="357957", score_raw="0", first_block=None, height=0, validation='full')), group_id=None), 
latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=32, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.449427506558003, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='', decoded={})], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.44942676691887, score=0.0, accumulated_weight_raw="357957", score_raw="0", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new tx (below), and one VERTEX_METADATA_CHANGED for a tx, adding the new tx as spending their output and children # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=32, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', nonce=0, timestamp=1578879030, signal_bits=0, version=1, 
weight=15.990494828748208, inputs=[TxInput(tx_id='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', index=1, spent_output=TxOutput(value=1000, token_data=0, script='', decoded=None))], outputs=[TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=15.990502884098087, score=0.0, accumulated_weight_raw="65106", score_raw="0", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=33, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.449427506558003, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='', decoded=None)], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', 
'33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', spent_outputs=[SpentOutput(index=1, tx_ids=['3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11'])], conflict_with=[], voided_by=[], received_by=[], children=['3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11'], twins=[], accumulated_weight=18.44942676691887, score=0.0, accumulated_weight_raw="357957", score_raw="0", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=33, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=15.990494828748208, inputs=[TxInput(tx_id='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', index=1, spent_output=TxOutput(value=1000, token_data=0, script='', decoded={}))], outputs=[TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=15.990502884098087, score=0.0, accumulated_weight_raw="65106", score_raw="0", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + 
EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=34, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.449427506558003, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='', decoded={})], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', spent_outputs=[SpentOutput(index=1, tx_ids=['3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11'])], conflict_with=[], voided_by=[], received_by=[], children=['3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11'], twins=[], accumulated_weight=18.44942676691887, score=0.0, accumulated_weight_raw="357957", score_raw="0", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new tx - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=34, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=15.990494828748208, 
inputs=[TxInput(tx_id='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', index=1, spent_output=TxOutput(value=1000, token_data=0, script='', decoded=None))], outputs=[TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=15.990502884098087, score=0.0, accumulated_weight_raw="65106", score_raw="0", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=35, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=15.990494828748208, inputs=[TxInput(tx_id='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', index=1, spent_output=TxOutput(value=1000, token_data=0, script='', decoded={}))], outputs=[TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', spent_outputs=[], conflict_with=[], 
voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=15.990502884098087, score=0.0, accumulated_weight_raw="65106", score_raw="0", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each confirmed transaction (first block changed) # noqa E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=35, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmkey79Rbhjq4BtHYCm2mT8hDprWIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLatLcoaATFMqECb5fD5rdW2nF9WGyw9os', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', '3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', 'ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=18.691576556156242, accumulated_weight_raw="256", score_raw="423375", first_block=None, height=12, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=36, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=15.990494828748208, 
inputs=[TxInput(tx_id='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', index=1, spent_output=TxOutput(value=1000, token_data=0, script='', decoded=None))], outputs=[TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d'], twins=[], accumulated_weight=15.990502884098087, score=0.0, accumulated_weight_raw="65106", score_raw="0", first_block='da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d', height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=37, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.449427506558003, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='', decoded=None)], 
parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', spent_outputs=[SpentOutput(index=1, tx_ids=['3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11'])], conflict_with=[], voided_by=[], received_by=[], children=['3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', 'da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d'], twins=[], accumulated_weight=18.44942676691887, score=0.0, accumulated_weight_raw="357957", score_raw="0", first_block='da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d', height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=36, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmkey79Rbhjq4BtHYCm2mT8hDprWIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLatLcoaATFMqECb5fD5rdW2nF9WGyw9os', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', '3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', 'ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=18.691576556156242, accumulated_weight_raw="256", score_raw="423375", first_block=None, height=12, 
validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=37, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=15.990494828748208, inputs=[TxInput(tx_id='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', index=1, spent_output=TxOutput(value=1000, token_data=0, script='', decoded={}))], outputs=[TxOutput(value=1000, token_data=0, script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None))], parents=['ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d'], twins=[], accumulated_weight=15.990502884098087, score=0.0, accumulated_weight_raw="65106", score_raw="0", first_block='da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d', height=0, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=38, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.449427506558003, inputs=[TxInput(tx_id='9b83e5dbc7145a5a161c34da4bec4e1a64dc02a3f2495a2db78457426c9ee6bf', index=0, spent_output=TxOutput(value=6400, token_data=0, 
script='dqkUPXOcGnrN0ZB2WrnPVcjdCCcacL+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC846khX278aM1utqAgPzkKAxBTfftaRDm', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUutgaVG8W5OnzgAEVUqB4XgmDgm2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPZ4x7a2NXdrMa5ksPfeGMZmjhJHTjDZ9Q', timelock=None)), TxOutput(value=1000, token_data=0, script='', decoded={})], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d', spent_outputs=[SpentOutput(index=1, tx_ids=['3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11'])], conflict_with=[], voided_by=[], received_by=[], children=['3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', 'da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d'], twins=[], accumulated_weight=18.44942676691887, score=0.0, accumulated_weight_raw="357957", score_raw="0", first_block='da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d', height=0, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new block - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=38, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmkey79Rbhjq4BtHYCm2mT8hDprWIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLatLcoaATFMqECb5fD5rdW2nF9WGyw9os', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', '3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', 
'ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=18.691576556156242, accumulated_weight_raw="256", score_raw="423375", first_block=None, height=12, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id)] # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=39, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmkey79Rbhjq4BtHYCm2mT8hDprWIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLatLcoaATFMqECb5fD5rdW2nF9WGyw9os', timelock=None))], parents=['f349fc0f570a636a440ed3853cc533faa2c4616160e1d9eb6f5d656a90da30fb', '3cd0d6caa93fcb179cfcd68c2faca1be2cca20cafa339bac10c57e64b9404f11', 'ea8f1b24846331047e73a33c23210ac2af1d812f14f0225a26337e52aab2435d'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='da38db48836d99beec10aece24c41f6d9f6a55ab5566d7ef5851af2952fb607d', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=18.691576556156242, accumulated_weight_raw="256", score_raw="423375", first_block=None, height=12, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id)] # noqa: E501 self.assert_response_equal(responses, expected) @@ -433,56 +443,196 @@ def test_custom_script(self) -> None: expected = [ # LOAD_STATED - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=0, timestamp=0, type=EventType.LOAD_STARTED, 
data=EmptyData(), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=0, timestamp=0, type=EventType.LOAD_STARTED, data=EmptyData(), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for each genesis (1 block and 2 txs) - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", 
first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', nonce=0, timestamp=1572636343, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU/QUFm2AGJJVDuC82h2oXxz/SJnuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HVayMofEDh4XGsaQJeRJKhutYxYodYNop6', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, timestamp=0, 
type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # LOAD_FINISHED - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=4, timestamp=0, type=EventType.LOAD_FINISHED, data=EmptyData(), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=4, timestamp=0, type=EventType.LOAD_FINISHED, data=EmptyData(), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED 
for each genesis tx (2), adding the new block as their first block # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=6, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', 
'8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19', '1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62', '3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6', 'ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f', '61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea', '95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084', 'c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a', 'db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1', '09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc', '8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', '8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19', '1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62', '3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6', 'ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f', '61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea', '95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084', 
'c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a', 'db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1', '09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc', '8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=6, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, 
data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', '8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19', '1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62', '3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6', 'ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f', '61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea', '95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084', 'c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a', 'db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1', '09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc', '8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', height=0, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], 
children=['2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', '8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19', '1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62', '3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6', 'ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f', '61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea', '95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084', 'c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a', 'db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1', '09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc', '8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', height=0, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, metadata=TxMetadata(hash='', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[''], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw="4", score_raw="4", first_block='', height=0, validation='full'), aux_pow=None), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new block - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', nonce=0, timestamp=1578878910, signal_bits=0, 
version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=9, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19'], twins=[], accumulated_weight=2.0, 
score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED and one NEW_VERTEX_ACCEPTED for 10 new blocks - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=9, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62'], twins=[], accumulated_weight=2.0, score=4.321928094887363, accumulated_weight_raw="4", score_raw="20", first_block=None, height=2, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=10, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], 
parents=['2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62'], twins=[], accumulated_weight=2.0, score=4.321928094887363, accumulated_weight_raw="4", score_raw="20", first_block=None, height=2, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=11, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6'], twins=[], accumulated_weight=2.0, score=4.584962500721156, accumulated_weight_raw="4", score_raw="24", first_block=None, height=3, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - 
EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=12, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6'], twins=[], accumulated_weight=2.0, score=4.584962500721156, accumulated_weight_raw="4", score_raw="24", first_block=None, height=3, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=13, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, 
token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f'], twins=[], accumulated_weight=2.0, score=4.807354922057604, accumulated_weight_raw="4", score_raw="28", first_block=None, height=4, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=14, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f'], twins=[], accumulated_weight=2.0, score=4.807354922057604, accumulated_weight_raw="4", score_raw="28", first_block=None, height=4, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=15, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f', nonce=0, timestamp=1578878914, 
signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea'], twins=[], accumulated_weight=2.0, score=5.0, accumulated_weight_raw="4", score_raw="32", first_block=None, height=5, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=16, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea'], twins=[], 
accumulated_weight=2.0, score=5.0, accumulated_weight_raw="4", score_raw="32", first_block=None, height=5, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=17, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084'], twins=[], accumulated_weight=2.0, score=5.169925001442312, accumulated_weight_raw="4", score_raw="36", first_block=None, height=6, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=18, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], 
parents=['ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084'], twins=[], accumulated_weight=2.0, score=5.169925001442312, accumulated_weight_raw="4", score_raw="36", first_block=None, height=6, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=19, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a'], twins=[], accumulated_weight=2.0, score=5.321928094887363, accumulated_weight_raw="4", score_raw="40", first_block=None, height=7, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - 
EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=20, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a'], twins=[], accumulated_weight=2.0, score=5.321928094887363, accumulated_weight_raw="4", score_raw="40", first_block=None, height=7, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=21, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, 
token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1'], twins=[], accumulated_weight=2.0, score=5.459431618637297, accumulated_weight_raw="4", score_raw="44", first_block=None, height=8, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=22, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1'], twins=[], accumulated_weight=2.0, score=5.459431618637297, accumulated_weight_raw="4", score_raw="44", first_block=None, height=8, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=23, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1', nonce=0, timestamp=1578878918, 
signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc'], twins=[], accumulated_weight=2.0, score=5.584962500721156, accumulated_weight_raw="4", score_raw="48", first_block=None, height=9, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=24, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc'], 
twins=[], accumulated_weight=2.0, score=5.584962500721156, accumulated_weight_raw="4", score_raw="48", first_block=None, height=9, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=25, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218'], twins=[], accumulated_weight=2.0, score=5.700439718141092, accumulated_weight_raw="4", score_raw="52", first_block=None, height=10, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=26, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], 
parents=['db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218'], twins=[], accumulated_weight=2.0, score=5.700439718141092, accumulated_weight_raw="4", score_raw="52", first_block=None, height=10, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=27, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUBvl1aaAtzoh8a9vaZoqXA6JxK4OIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H7A1HBirZ4EhWtCWLcAy4yw6ybWcKnjdfG', timelock=None))], parents=['09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, accumulated_weight_raw="4", score_raw="56", first_block=None, height=11, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', 
event=BaseEvent(id=28, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUBvl1aaAtzoh8a9vaZoqXA6JxK4OIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H7A1HBirZ4EhWtCWLcAy4yw6ybWcKnjdfG', timelock=None))], parents=['09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, accumulated_weight_raw="4", score_raw="56", first_block=None, height=11, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=10, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19', spent_outputs=[], 
conflict_with=[], voided_by=[], received_by=[], children=['1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62'], twins=[], accumulated_weight=2.0, score=4.321928094887363, accumulated_weight_raw="4", score_raw="20", first_block=None, height=2, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=11, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19', nonce=0, timestamp=1578878911, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUu9S/kjy3HbglEu3bA4JargdORiiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPeHcEFtRZvMBijqFwccicDMkN17hoNq21', timelock=None))], parents=['2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62'], twins=[], accumulated_weight=2.0, score=4.321928094887363, accumulated_weight_raw="4", score_raw="20", first_block=None, height=2, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=12, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', 
decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6'], twins=[], accumulated_weight=2.0, score=4.584962500721156, accumulated_weight_raw="4", score_raw="24", first_block=None, height=3, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=13, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62', nonce=0, timestamp=1578878912, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUzskI6jayLvTobJDhpVZiuMu7zt+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRNWR1HpdAiDx7va9VkNUuqqSo2MGW5iE6', timelock=None))], parents=['8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6'], twins=[], accumulated_weight=2.0, score=4.584962500721156, accumulated_weight_raw="4", score_raw="24", first_block=None, height=3, 
validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=14, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f'], twins=[], accumulated_weight=2.0, score=4.807354922057604, accumulated_weight_raw="4", score_raw="28", first_block=None, height=4, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=15, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6', nonce=0, timestamp=1578878913, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU7B7Cf/pnj2DglfhnqyiRzxNg+K2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HU3chqobPRBt8pjYXt4WahKERjV8UMCWbd', timelock=None))], parents=['1aa2c724f1932b04a8358ab41a9bca864c3528b69afcc8df83e104cad3247a62', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', 
'33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f'], twins=[], accumulated_weight=2.0, score=4.807354922057604, accumulated_weight_raw="4", score_raw="28", first_block=None, height=4, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=16, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea'], twins=[], accumulated_weight=2.0, score=5.0, accumulated_weight_raw="4", score_raw="32", first_block=None, height=5, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=17, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, 
data=TxData(hash='ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f', nonce=0, timestamp=1578878914, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUZmTJ0of2Ce9iuycIVpFCVU08WmKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFrY3outhFVXGLEvaVKVFkd2nB1ihumXCr', timelock=None))], parents=['3ec4aadfbcd5aa4cbf14f6198b56d30158e865f8e907e494d7a7813ac6b6b5e6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea'], twins=[], accumulated_weight=2.0, score=5.0, accumulated_weight_raw="4", score_raw="32", first_block=None, height=5, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=18, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea', spent_outputs=[], conflict_with=[], 
voided_by=[], received_by=[], children=['95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084'], twins=[], accumulated_weight=2.0, score=5.169925001442312, accumulated_weight_raw="4", score_raw="36", first_block=None, height=6, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=19, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea', nonce=0, timestamp=1578878915, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPNN8M/qangqd2wYSzu0u+3OmwDmIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HC4kH6pnYBofzTSFWRpA71Po7geNURh5p2', timelock=None))], parents=['ce9cca7b876ea1cfa3b47e3a8d63c054cf974a3aa421c1bc1dba13e9b44a2f2f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084'], twins=[], accumulated_weight=2.0, score=5.169925001442312, accumulated_weight_raw="4", score_raw="36", first_block=None, height=6, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=20, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', 
decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a'], twins=[], accumulated_weight=2.0, score=5.321928094887363, accumulated_weight_raw="4", score_raw="40", first_block=None, height=7, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=21, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084', nonce=0, timestamp=1578878916, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUxbNqvpWbgNtk9km/VuYhzHHMp76IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HQYUSF8ytNmm92GYMCS8XPYkt3JeKkBDyj', timelock=None))], parents=['61179abc731d966f722d0d8b06a9d405672065887279bf9a5d13f90e18d3faea', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a'], twins=[], accumulated_weight=2.0, score=5.321928094887363, accumulated_weight_raw="4", score_raw="40", first_block=None, height=7, 
validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=22, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1'], twins=[], accumulated_weight=2.0, score=5.459431618637297, accumulated_weight_raw="4", score_raw="44", first_block=None, height=8, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=23, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a', nonce=0, timestamp=1578878917, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkU48C0XcFpiaWq2gwTICyEVdvJXcCIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HTHNdEhmQeECj5brwUzHK4Sq3fFrFiEvaK', timelock=None))], parents=['95f79d8bb3363ea030d209428c11f0a77bb675f42c59892f66eeb0c90f437084', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', 
'33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1'], twins=[], accumulated_weight=2.0, score=5.459431618637297, accumulated_weight_raw="4", score_raw="44", first_block=None, height=8, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=24, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc'], twins=[], accumulated_weight=2.0, score=5.584962500721156, accumulated_weight_raw="4", score_raw="48", first_block=None, height=9, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=25, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, 
data=TxData(hash='db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1', nonce=0, timestamp=1578878918, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUmQRjqRyxq26raJZnhnpRJsrS9n2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HLUD2fi9udkg3ysPKdGvbWDyHFWdXBY1i1', timelock=None))], parents=['c4707e982d6d980b3ec5501b0e2c43eed55439d4b6fb34565694fe58e00cac1a', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc'], twins=[], accumulated_weight=2.0, score=5.584962500721156, accumulated_weight_raw="4", score_raw="48", first_block=None, height=9, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=26, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc', spent_outputs=[], 
conflict_with=[], voided_by=[], received_by=[], children=['8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218'], twins=[], accumulated_weight=2.0, score=5.700439718141092, accumulated_weight_raw="4", score_raw="52", first_block=None, height=10, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=27, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc', nonce=0, timestamp=1578878919, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUYFHjcujZZHs0JWZkriEbn5jTv/aIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HFJRMUG7GTjdqG5f6e5tqnrnquBMFCvvs2', timelock=None))], parents=['db4d5e585d0c70f69bab6e61405078b6435dac84e7b731a85f97a282b1f3d9c1', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218'], twins=[], accumulated_weight=2.0, score=5.700439718141092, accumulated_weight_raw="4", score_raw="52", first_block=None, height=10, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=28, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUBvl1aaAtzoh8a9vaZoqXA6JxK4OIrA==', 
decoded=DecodedTxOutput(type='P2PKH', address='H7A1HBirZ4EhWtCWLcAy4yw6ybWcKnjdfG', timelock=None))], parents=['09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, accumulated_weight_raw="4", score_raw="56", first_block=None, height=11, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=29, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218', nonce=0, timestamp=1578878920, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUBvl1aaAtzoh8a9vaZoqXA6JxK4OIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H7A1HBirZ4EhWtCWLcAy4yw6ybWcKnjdfG', timelock=None))], parents=['09a4e391189dce39b747ce9e2231e7079cf737a173d9004a68826c52051f2bdc', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=5.807354922057604, accumulated_weight_raw="4", score_raw="56", first_block=None, height=11, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new tx (below), 
and one VERTEX_METADATA_CHANGED for a block, adding the new tx as spending their output # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=29, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.55128132611371, inputs=[TxInput(tx_id='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H8XUjiUx24WLXUN63da34hX6bEs29GJjSs', timelock=None)), TxOutput(value=1000, token_data=0, script='CXNvbWVfZGF0YYhR', decoded=None)], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.55128018336992, score=0.0, accumulated_weight_raw="384142", score_raw="0", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=30, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', 
decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', spent_outputs=[SpentOutput(index=0, tx_ids=['cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec'])], conflict_with=[], voided_by=[], received_by=[], children=['8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=30, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.55128132611371, inputs=[TxInput(tx_id='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H8XUjiUx24WLXUN63da34hX6bEs29GJjSs', timelock=None)), TxOutput(value=1000, token_data=0, script='CXNvbWVfZGF0YYhR', decoded={})], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, 
aux_pow=None, metadata=TxMetadata(hash='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.55128018336992, score=0.0, accumulated_weight_raw="384142", score_raw="0", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=31, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', nonce=0, timestamp=1578878910, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['339f47da87435842b0b1b528ecd9eac2495ce983b3e9c923a37e1befbe12c792', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', spent_outputs=[SpentOutput(index=0, tx_ids=['cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec'])], conflict_with=[], voided_by=[], received_by=[], children=['8b3f0d414755bf6a071deb83b51c5276e41b076b14307123399b804a022f7b19'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw="4", score_raw="16", first_block=None, height=1, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new tx - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=31, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, 
data=TxData(hash='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.55128132611371, inputs=[TxInput(tx_id='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H8XUjiUx24WLXUN63da34hX6bEs29GJjSs', timelock=None)), TxOutput(value=1000, token_data=0, script='CXNvbWVfZGF0YYhR', decoded=None)], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.55128018336992, score=0.0, accumulated_weight_raw="384142", score_raw="0", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=32, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.55128132611371, inputs=[TxInput(tx_id='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, 
script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H8XUjiUx24WLXUN63da34hX6bEs29GJjSs', timelock=None)), TxOutput(value=1000, token_data=0, script='CXNvbWVfZGF0YYhR', decoded={})], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=18.55128018336992, score=0.0, accumulated_weight_raw="384142", score_raw="0", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new tx (below), and one VERTEX_METADATA_CHANGED for a tx, adding the new tx as spending their output and children # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=32, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=16.12160141040609, inputs=[TxInput(tx_id='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', index=1, spent_output=TxOutput(value=1000, token_data=0, script='CXNvbWVfZGF0YYhR', decoded=None))], outputs=[TxOutput(value=1000, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', 
spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=16.12159422192467, score=0.0, accumulated_weight_raw="71299", score_raw="0", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=33, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.55128132611371, inputs=[TxInput(tx_id='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H8XUjiUx24WLXUN63da34hX6bEs29GJjSs', timelock=None)), TxOutput(value=1000, token_data=0, script='CXNvbWVfZGF0YYhR', decoded=None)], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', spent_outputs=[SpentOutput(index=1, tx_ids=['3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3'])], conflict_with=[], voided_by=[], received_by=[], children=['3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3'], twins=[], accumulated_weight=18.55128018336992, score=0.0, accumulated_weight_raw="384142", score_raw="0", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', 
peer_id=self.peer_id, network='unittests', event=BaseEvent(id=33, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=16.12160141040609, inputs=[TxInput(tx_id='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', index=1, spent_output=TxOutput(value=1000, token_data=0, script='CXNvbWVfZGF0YYhR', decoded={}))], outputs=[TxOutput(value=1000, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=16.12159422192467, score=0.0, accumulated_weight_raw="71299", score_raw="0", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=34, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.55128132611371, inputs=[TxInput(tx_id='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', 
decoded=DecodedTxOutput(type='P2PKH', address='H8XUjiUx24WLXUN63da34hX6bEs29GJjSs', timelock=None)), TxOutput(value=1000, token_data=0, script='CXNvbWVfZGF0YYhR', decoded={})], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', spent_outputs=[SpentOutput(index=1, tx_ids=['3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3'])], conflict_with=[], voided_by=[], received_by=[], children=['3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3'], twins=[], accumulated_weight=18.55128018336992, score=0.0, accumulated_weight_raw="384142", score_raw="0", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new tx - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=34, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=16.12160141040609, inputs=[TxInput(tx_id='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', index=1, spent_output=TxOutput(value=1000, token_data=0, script='CXNvbWVfZGF0YYhR', decoded=None))], outputs=[TxOutput(value=1000, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', spent_outputs=[], 
conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=16.12159422192467, score=0.0, accumulated_weight_raw="71299", score_raw="0", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=35, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=16.12160141040609, inputs=[TxInput(tx_id='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', index=1, spent_output=TxOutput(value=1000, token_data=0, script='CXNvbWVfZGF0YYhR', decoded={}))], outputs=[TxOutput(value=1000, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=16.12159422192467, score=0.0, accumulated_weight_raw="71299", score_raw="0", first_block=None, height=0, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each confirmed transaction (first block changed) # noqa E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=35, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302', nonce=0, 
timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUTisHvpM4sDeINzxF5auK/8bP6UaIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HDeSe6qKqjSLwtnjLBV84NddtZQyNb9HUU', timelock=None))], parents=['8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218', '3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', 'cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=18.79789262729119, accumulated_weight_raw="256", score_raw="455753", first_block=None, height=12, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=36, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=16.12160141040609, inputs=[TxInput(tx_id='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', index=1, spent_output=TxOutput(value=1000, token_data=0, script='CXNvbWVfZGF0YYhR', decoded=None))], outputs=[TxOutput(value=1000, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', spent_outputs=[], conflict_with=[], voided_by=[], 
received_by=[], children=['99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302'], twins=[], accumulated_weight=16.12159422192467, score=0.0, accumulated_weight_raw="71299", score_raw="0", first_block='99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302', height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=37, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.55128132611371, inputs=[TxInput(tx_id='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H8XUjiUx24WLXUN63da34hX6bEs29GJjSs', timelock=None)), TxOutput(value=1000, token_data=0, script='CXNvbWVfZGF0YYhR', decoded=None)], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', spent_outputs=[SpentOutput(index=1, tx_ids=['3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3'])], conflict_with=[], voided_by=[], received_by=[], children=['3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', '99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302'], twins=[], accumulated_weight=18.55128018336992, score=0.0, accumulated_weight_raw="384142", score_raw="0", 
first_block='99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302', height=0, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=36, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUTisHvpM4sDeINzxF5auK/8bP6UaIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HDeSe6qKqjSLwtnjLBV84NddtZQyNb9HUU', timelock=None))], parents=['8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218', '3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', 'cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=18.79789262729119, accumulated_weight_raw="256", score_raw="455753", first_block=None, height=12, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=37, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', nonce=0, timestamp=1578879030, signal_bits=0, version=1, weight=16.12160141040609, inputs=[TxInput(tx_id='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', index=1, spent_output=TxOutput(value=1000, token_data=0, script='CXNvbWVfZGF0YYhR', decoded={}))], outputs=[TxOutput(value=1000, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', 
decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None))], parents=['cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302'], twins=[], accumulated_weight=16.12159422192467, score=0.0, accumulated_weight_raw="71299", score_raw="0", first_block='99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302', height=0, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=38, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', nonce=0, timestamp=1578878970, signal_bits=0, version=1, weight=18.55128132611371, inputs=[TxInput(tx_id='2ceb49662c7a9b468a93d2f1bb5849e9412b6e2e6b6bec8df8d6dc65d48ad4e9', index=0, spent_output=TxOutput(value=6400, token_data=0, script='dqkUXRFxfhIYOXURHjiAlx9XPuMh7E2IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HF1E8Aibb17Rha6r1cM1oCp74DRmYqP61V', timelock=None)))], outputs=[TxOutput(value=5400, token_data=0, script='dqkUFgE9a6rVMusN303z18sYfjdpYGqIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H8XUjiUx24WLXUN63da34hX6bEs29GJjSs', timelock=None)), TxOutput(value=1000, token_data=0, script='CXNvbWVfZGF0YYhR', decoded={})], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, 
metadata=TxMetadata(hash='cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec', spent_outputs=[SpentOutput(index=1, tx_ids=['3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3'])], conflict_with=[], voided_by=[], received_by=[], children=['3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', '99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302'], twins=[], accumulated_weight=18.55128018336992, score=0.0, accumulated_weight_raw="384142", score_raw="0", first_block='99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302', height=0, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id), # noqa: E501 # One NEW_VERTEX_ACCEPTED for a new block - EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=38, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUTisHvpM4sDeINzxF5auK/8bP6UaIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HDeSe6qKqjSLwtnjLBV84NddtZQyNb9HUU', timelock=None))], parents=['8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218', '3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', 'cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=18.79789262729119, accumulated_weight_raw="256", score_raw="455753", first_block=None, height=12, validation='full')), group_id=None), latest_event_id=38, stream_id=stream_id) # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', 
event=BaseEvent(id=39, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302', nonce=0, timestamp=1578879090, signal_bits=0, version=0, weight=8.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUTisHvpM4sDeINzxF5auK/8bP6UaIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HDeSe6qKqjSLwtnjLBV84NddtZQyNb9HUU', timelock=None))], parents=['8fa74324107529b23223b1639a9c8a37cb8bdbb25aa8c5476a49c1095d152218', '3fbdad9949edf66d099421003ec68bde17d5240305baecf2432a8e1bc2ff47a3', 'cd2ef92d046cbd5bbcedc60f1bfb412dca1b3e3352a9ac80e9d92679d38715ec'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='99d29ec48a3a088dbd786b411daabbc7111974b97abc271a2e338cf46c081302', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=8.0, score=18.79789262729119, accumulated_weight_raw="256", score_raw="455753", first_block=None, height=12, validation='full')), group_id=None), latest_event_id=39, stream_id=stream_id) # noqa: E501 + ] + + self.assert_response_equal(responses, expected) + + def test_nc_events(self) -> None: + stream_id = self.manager._event_manager._stream_id + assert stream_id is not None + artifacts = Scenario.NC_EVENTS.simulate(self.simulator, self.manager) + assert artifacts is not None + self._start_stream() + + b1, b2, b3 = artifacts.get_typed_vertices(['b1', 'b2', 'b3'], Block) + dummy = artifacts.get_typed_vertex('dummy', Transaction) + nc1, nc2, nc3, nc4 = artifacts.get_typed_vertices(['nc1', 'nc2', 'nc3', 'nc4'], Transaction) + assert nc1.is_nano_contract() + assert nc2.is_nano_contract() + assert nc3.is_nano_contract() + assert nc4.is_nano_contract() + + responses = self._get_success_responses() + + expected = [ + # LOAD_STATED + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=0, timestamp=0, type=EventType.LOAD_STARTED, data=EmptyData(), 
group_id=None), latest_event_id=37, stream_id=stream_id), # noqa: E501 + # One NEW_VERTEX_ACCEPTED for each genesis (1 block and 2 txs) + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash=self.genesis_block_hash, nonce=5, timestamp=1572636343, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU0HvILW4NG7EWYUB2ZF6bh8jIO0GIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRXVDmLVdq8pgok1BCUKpiFWdAVAy4a5AJ', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash=self.genesis_block_hash, spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw='4', score_raw='4', first_block=None, height=0, validation='full')), group_id=None), latest_event_id=37, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash=self.genesis_tx1_hash, nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash=self.genesis_tx1_hash, spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw='4', score_raw='4', first_block=None, height=0, validation='full')), group_id=None), latest_event_id=37, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash=self.genesis_tx2_hash, nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], 
token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash=self.genesis_tx2_hash, spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw='4', score_raw='4', first_block=None, height=0, validation='full')), group_id=None), latest_event_id=37, stream_id=stream_id), # noqa: E501 + # LOAD_FINISHED + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=4, timestamp=0, type=EventType.LOAD_FINISHED, data=EmptyData(), group_id=None), latest_event_id=37, stream_id=stream_id), # noqa: E501 + # One VERTEX_METADATA_CHANGED for a new block (below), and one VERTEX_METADATA_CHANGED for each genesis tx (2), adding the new block as their child # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash=b1.hash_hex, nonce=0, timestamp=1572636376, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPzq5l2BXiW8jWwfbfvsGUb10AyiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HCHTJiEmKp2E21AJdpfWz3NxHMmtNQRroi', timelock=None))], parents=[self.genesis_block_hash, self.genesis_tx2_hash, self.genesis_tx1_hash], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash=b1.hash_hex, spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[b2.hash_hex], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw='4', score_raw='16', first_block=None, height=1, validation='full')), group_id=None), latest_event_id=37, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=6, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash=self.genesis_tx2_hash, nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], 
outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash=self.genesis_tx2_hash, spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[b1.hash_hex, dummy.hash_hex, nc1.hash_hex, b2.hash_hex], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw='4', score_raw='4', first_block=b1.hash_hex, height=0, validation='full')), group_id=None), latest_event_id=37, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash=self.genesis_tx1_hash, nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash=self.genesis_tx1_hash, spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[b1.hash_hex, dummy.hash_hex, nc1.hash_hex], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw='4', score_raw='4', first_block=b1.hash_hex, height=0, validation='full')), group_id=None), latest_event_id=37, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash=self.genesis_block_hash, nonce=5, timestamp=1572636343, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU0HvILW4NG7EWYUB2ZF6bh8jIO0GIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRXVDmLVdq8pgok1BCUKpiFWdAVAy4a5AJ', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash=self.genesis_block_hash, spent_outputs=[SpentOutput(index=0, tx_ids=[dummy.hash_hex])], conflict_with=[], voided_by=[], received_by=[], children=[b1.hash_hex], twins=[], accumulated_weight=2.0, score=2.0, 
accumulated_weight_raw='4', score_raw='4', first_block=None, height=0, validation='full')), group_id=None), latest_event_id=37, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=9, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash=b1.hash_hex, nonce=0, timestamp=1572636376, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPzq5l2BXiW8jWwfbfvsGUb10AyiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HCHTJiEmKp2E21AJdpfWz3NxHMmtNQRroi', timelock=None))], parents=[self.genesis_block_hash, self.genesis_tx2_hash, self.genesis_tx1_hash], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash=b1.hash_hex, spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[b2.hash_hex], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw='4', score_raw='16', first_block=None, height=1, validation='full')), group_id=None), latest_event_id=37, stream_id=stream_id), # noqa: E501 + # dummy tx + its spent output being updated + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=10, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash=dummy.hash_hex, nonce=0, timestamp=1572636377, signal_bits=0, version=1, weight=21.119475351738224, inputs=[TxInput(tx_id=self.genesis_block_hash, index=0, spent_output=TxOutput(value=100000000000, token_data=0, script='dqkU0HvILW4NG7EWYUB2ZF6bh8jIO0GIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRXVDmLVdq8pgok1BCUKpiFWdAVAy4a5AJ', timelock=None)))], outputs=[TxOutput(value=1, token_data=0, script='dqkU80nn8+AtzngP5Dna6XSwXuuCC/eIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HUhX83RJxSmUoffvmTNsLa4kAf33Y7GtuA', timelock=None)), TxOutput(value=99999999999, token_data=0, script='dqkUf5BkCh0suqADvCnN6YjtldkKnueIrA==', decoded=DecodedTxOutput(type='P2PKH', 
address='HJ9dB9tKwFs8MGvcgdSpZZbVXWxpFZ9tBs', timelock=None))], parents=[self.genesis_tx2_hash, self.genesis_tx1_hash], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash=dummy.hash_hex, spent_outputs=[SpentOutput(index=0, tx_ids=[nc1.hash_hex])], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=21.11947563927789, score=0.0, accumulated_weight_raw='2278220', score_raw='0', first_block=b2.hash_hex, height=0, validation='full')), group_id=None), latest_event_id=37, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=11, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash=self.genesis_block_hash, nonce=5, timestamp=1572636343, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU0HvILW4NG7EWYUB2ZF6bh8jIO0GIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRXVDmLVdq8pgok1BCUKpiFWdAVAy4a5AJ', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash=self.genesis_block_hash, spent_outputs=[SpentOutput(index=0, tx_ids=[dummy.hash_hex])], conflict_with=[], voided_by=[], received_by=[], children=[b1.hash_hex], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw='4', score_raw='4', first_block=None, height=0, validation='full')), group_id=None), latest_event_id=37, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=12, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash=dummy.hash_hex, nonce=0, timestamp=1572636377, signal_bits=0, version=1, weight=21.119475351738224, inputs=[TxInput(tx_id=self.genesis_block_hash, index=0, spent_output=TxOutput(value=100000000000, token_data=0, script='dqkU0HvILW4NG7EWYUB2ZF6bh8jIO0GIrA==', decoded=DecodedTxOutput(type='P2PKH', 
address='HRXVDmLVdq8pgok1BCUKpiFWdAVAy4a5AJ', timelock=None)))], outputs=[TxOutput(value=1, token_data=0, script='dqkU80nn8+AtzngP5Dna6XSwXuuCC/eIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HUhX83RJxSmUoffvmTNsLa4kAf33Y7GtuA', timelock=None)), TxOutput(value=99999999999, token_data=0, script='dqkUf5BkCh0suqADvCnN6YjtldkKnueIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HJ9dB9tKwFs8MGvcgdSpZZbVXWxpFZ9tBs', timelock=None))], parents=[self.genesis_tx2_hash, self.genesis_tx1_hash], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash=dummy.hash_hex, spent_outputs=[SpentOutput(index=0, tx_ids=[nc1.hash_hex])], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=21.11947563927789, score=0.0, accumulated_weight_raw='2278220', score_raw='0', first_block=b2.hash_hex, height=0, validation='full')), group_id=None), latest_event_id=37, stream_id=stream_id), # noqa: E501 + # nc1 + its spent output being updated + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=13, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash=nc1.hash_hex, nonce=0, timestamp=1572636378, signal_bits=0, version=4, weight=17.8819312127108, inputs=[TxInput(tx_id=dummy.hash_hex, index=0, spent_output=TxOutput(value=1, token_data=0, script='dqkU80nn8+AtzngP5Dna6XSwXuuCC/eIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HUhX83RJxSmUoffvmTNsLa4kAf33Y7GtuA', timelock=None)))], outputs=[TxOutput(value=1, token_data=0, script='dqkUuREy3ZBqH13akgzR38nfAl3QLryIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPPfzpTiAfyj6gsbHG7FQsYUbVXaSg9MQK', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash=nc1.hash_hex, spent_outputs=[], conflict_with=[], voided_by=[], 
received_by=[], children=[nc2.hash_hex], twins=[], accumulated_weight=17.881932463618046, score=0.0, accumulated_weight_raw='241545', score_raw='0', first_block=b2.hash_hex, height=0, validation='full')), group_id=None), latest_event_id=37, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=14, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash=dummy.hash_hex, nonce=0, timestamp=1572636377, signal_bits=0, version=1, weight=21.777639275691804, inputs=[TxInput(tx_id='2ebb3b8edcb72a7e46cc0efacfe1b109e2e9dd868a90fe0906968dc8fbbf6488', index=0, spent_output=TxOutput(value=100000000000, token_data=0, script='dqkU0HvILW4NG7EWYUB2ZF6bh8jIO0GIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRXVDmLVdq8pgok1BCUKpiFWdAVAy4a5AJ', timelock=None)))], outputs=[TxOutput(value=1, token_data=0, script='dqkU80nn8+AtzngP5Dna6XSwXuuCC/eIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HUhX83RJxSmUoffvmTNsLa4kAf33Y7GtuA', timelock=None)), TxOutput(value=1, token_data=0, script='dqkUf5BkCh0suqADvCnN6YjtldkKnueIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HJ9dB9tKwFs8MGvcgdSpZZbVXWxpFZ9tBs', timelock=None)), TxOutput(value=1, token_data=0, script='dqkUywjnDVLCo2DvKimDErqN97qgd+SIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HR2gDr1CEhH8oaBPdjgMddzcxiESgmPdPQ', timelock=None)), TxOutput(value=1, token_data=0, script='dqkUSs2wh6dehIfZfwWtUCkvVL6mm8eIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HDLeo6r8wB8schqaMxrLP1HrrpBVZeRfWY', timelock=None)), TxOutput(value=99999999996, token_data=0, script='dqkUH3DCvvDLG1Vgxw9QKnqEFygPQbKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H9PNR3V6vdzSr7t394NGWB5uabCuhtN1LV', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, 
metadata=TxMetadata(hash=dummy.hash_hex, spent_outputs=[SpentOutput(index=0, tx_ids=[nc1.hash_hex]), SpentOutput(index=1, tx_ids=[nc2.hash_hex]), SpentOutput(index=2, tx_ids=[nc3.hash_hex]), SpentOutput(index=3, tx_ids=[nc4.hash_hex])], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=21.777639395103126, score=0.0, accumulated_weight_raw='3595197', score_raw='0', first_block=b2.hash_hex, height=0, validation='full')), group_id=None), latest_event_id=37, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=15, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash=nc1.hash_hex, nonce=0, timestamp=1572636378, signal_bits=0, version=4, weight=17.8819312127108, inputs=[TxInput(tx_id=dummy.hash_hex, index=0, spent_output=TxOutput(value=1, token_data=0, script='dqkU80nn8+AtzngP5Dna6XSwXuuCC/eIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HUhX83RJxSmUoffvmTNsLa4kAf33Y7GtuA', timelock=None)))], outputs=[TxOutput(value=1, token_data=0, script='dqkUuREy3ZBqH13akgzR38nfAl3QLryIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPPfzpTiAfyj6gsbHG7FQsYUbVXaSg9MQK', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash=nc1.hash_hex, spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[nc2.hash_hex], twins=[], accumulated_weight=17.881932463618046, score=0.0, accumulated_weight_raw='241545', score_raw='0', first_block=b2.hash_hex, height=0, validation='full')), group_id=None), latest_event_id=37, stream_id=stream_id), # noqa: E501 + # nc2 + its spent output being updated + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=16, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, 
data=TxData(hash=nc2.hash_hex, nonce=0, timestamp=1572636379, signal_bits=0, version=4, weight=17.893191293894695, inputs=[TxInput(tx_id=dummy.hash_hex, index=1, spent_output=TxOutput(value=1, token_data=0, script='dqkUf5BkCh0suqADvCnN6YjtldkKnueIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HJ9dB9tKwFs8MGvcgdSpZZbVXWxpFZ9tBs', timelock=None)))], outputs=[TxOutput(value=1, token_data=0, script='dqkUoFBWk4Dv5WGbe4jvvVKoLI/iAMeIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HM8nk2q74k3nLh4FnUWfCgyuT165bmHyvu', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nc1.hash_hex], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash=nc2.hash_hex, spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[nc3.hash_hex, b2.hash_hex], twins=[], accumulated_weight=17.893188934434693, score=0.0, accumulated_weight_raw='243437', score_raw='0', first_block=b2.hash_hex, height=0, validation='full')), group_id=None), latest_event_id=37, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=17, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash=dummy.hash_hex, nonce=0, timestamp=1572636377, signal_bits=0, version=1, weight=21.777639275691804, inputs=[TxInput(tx_id='2ebb3b8edcb72a7e46cc0efacfe1b109e2e9dd868a90fe0906968dc8fbbf6488', index=0, spent_output=TxOutput(value=100000000000, token_data=0, script='dqkU0HvILW4NG7EWYUB2ZF6bh8jIO0GIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRXVDmLVdq8pgok1BCUKpiFWdAVAy4a5AJ', timelock=None)))], outputs=[TxOutput(value=1, token_data=0, script='dqkU80nn8+AtzngP5Dna6XSwXuuCC/eIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HUhX83RJxSmUoffvmTNsLa4kAf33Y7GtuA', timelock=None)), TxOutput(value=1, token_data=0, script='dqkUf5BkCh0suqADvCnN6YjtldkKnueIrA==', decoded=DecodedTxOutput(type='P2PKH', 
address='HJ9dB9tKwFs8MGvcgdSpZZbVXWxpFZ9tBs', timelock=None)), TxOutput(value=1, token_data=0, script='dqkUywjnDVLCo2DvKimDErqN97qgd+SIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HR2gDr1CEhH8oaBPdjgMddzcxiESgmPdPQ', timelock=None)), TxOutput(value=1, token_data=0, script='dqkUSs2wh6dehIfZfwWtUCkvVL6mm8eIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HDLeo6r8wB8schqaMxrLP1HrrpBVZeRfWY', timelock=None)), TxOutput(value=99999999996, token_data=0, script='dqkUH3DCvvDLG1Vgxw9QKnqEFygPQbKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H9PNR3V6vdzSr7t394NGWB5uabCuhtN1LV', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash=dummy.hash_hex, spent_outputs=[SpentOutput(index=0, tx_ids=[nc1.hash_hex]), SpentOutput(index=1, tx_ids=[nc2.hash_hex]), SpentOutput(index=2, tx_ids=[nc3.hash_hex]), SpentOutput(index=3, tx_ids=[nc4.hash_hex])], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=21.777639395103126, score=0.0, accumulated_weight_raw='3595197', score_raw='0', first_block=b2.hash_hex, height=0, validation='full')), group_id=None), latest_event_id=37, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=18, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash=nc2.hash_hex, nonce=0, timestamp=1572636379, signal_bits=0, version=4, weight=17.893191293894695, inputs=[TxInput(tx_id=dummy.hash_hex, index=1, spent_output=TxOutput(value=1, token_data=0, script='dqkUf5BkCh0suqADvCnN6YjtldkKnueIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HJ9dB9tKwFs8MGvcgdSpZZbVXWxpFZ9tBs', timelock=None)))], outputs=[TxOutput(value=1, token_data=0, script='dqkUoFBWk4Dv5WGbe4jvvVKoLI/iAMeIrA==', decoded=DecodedTxOutput(type='P2PKH', 
address='HM8nk2q74k3nLh4FnUWfCgyuT165bmHyvu', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nc1.hash_hex], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash=nc2.hash_hex, spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[nc3.hash_hex, b2.hash_hex], twins=[], accumulated_weight=17.893188934434693, score=0.0, accumulated_weight_raw='243437', score_raw='0', first_block=b2.hash_hex, height=0, validation='full')), group_id=None), latest_event_id=37, stream_id=stream_id), # noqa: E501 + # nc3 + its spent output being updated + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=19, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash=nc3.hash_hex, nonce=0, timestamp=1572636380, signal_bits=0, version=4, weight=18.081839799876487, inputs=[TxInput(tx_id=dummy.hash_hex, index=2, spent_output=TxOutput(value=1, token_data=0, script='dqkUywjnDVLCo2DvKimDErqN97qgd+SIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HR2gDr1CEhH8oaBPdjgMddzcxiESgmPdPQ', timelock=None)))], outputs=[TxOutput(value=1, token_data=0, script='dqkUp+HdqawCdjC61o+vrDbEpBOVOmuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HMpoopGYaybBqTotndHfdYxcZBgaqBZbuJ', timelock=None))], parents=[nc2.hash_hex, '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash=nc3.hash_hex, spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[nc4.hash_hex], twins=[], accumulated_weight=18.081842278001805, score=0.0, accumulated_weight_raw='277445', score_raw='0', first_block=None, height=0, validation='full')), group_id=None), latest_event_id=37, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=20, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, 
data=TxData(hash=dummy.hash_hex, nonce=0, timestamp=1572636377, signal_bits=0, version=1, weight=21.777639275691804, inputs=[TxInput(tx_id='2ebb3b8edcb72a7e46cc0efacfe1b109e2e9dd868a90fe0906968dc8fbbf6488', index=0, spent_output=TxOutput(value=100000000000, token_data=0, script='dqkU0HvILW4NG7EWYUB2ZF6bh8jIO0GIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRXVDmLVdq8pgok1BCUKpiFWdAVAy4a5AJ', timelock=None)))], outputs=[TxOutput(value=1, token_data=0, script='dqkU80nn8+AtzngP5Dna6XSwXuuCC/eIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HUhX83RJxSmUoffvmTNsLa4kAf33Y7GtuA', timelock=None)), TxOutput(value=1, token_data=0, script='dqkUf5BkCh0suqADvCnN6YjtldkKnueIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HJ9dB9tKwFs8MGvcgdSpZZbVXWxpFZ9tBs', timelock=None)), TxOutput(value=1, token_data=0, script='dqkUywjnDVLCo2DvKimDErqN97qgd+SIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HR2gDr1CEhH8oaBPdjgMddzcxiESgmPdPQ', timelock=None)), TxOutput(value=1, token_data=0, script='dqkUSs2wh6dehIfZfwWtUCkvVL6mm8eIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HDLeo6r8wB8schqaMxrLP1HrrpBVZeRfWY', timelock=None)), TxOutput(value=99999999996, token_data=0, script='dqkUH3DCvvDLG1Vgxw9QKnqEFygPQbKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H9PNR3V6vdzSr7t394NGWB5uabCuhtN1LV', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash=dummy.hash_hex, spent_outputs=[SpentOutput(index=0, tx_ids=[nc1.hash_hex]), SpentOutput(index=1, tx_ids=[nc2.hash_hex]), SpentOutput(index=2, tx_ids=[nc3.hash_hex]), SpentOutput(index=3, tx_ids=[nc4.hash_hex])], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=21.777639395103126, score=0.0, accumulated_weight_raw='3595197', score_raw='0', first_block=b2.hash_hex, height=0, 
validation='full')), group_id=None), latest_event_id=37, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=21, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash=nc3.hash_hex, nonce=0, timestamp=1572636380, signal_bits=0, version=4, weight=18.081839799876487, inputs=[TxInput(tx_id=dummy.hash_hex, index=2, spent_output=TxOutput(value=1, token_data=0, script='dqkUywjnDVLCo2DvKimDErqN97qgd+SIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HR2gDr1CEhH8oaBPdjgMddzcxiESgmPdPQ', timelock=None)))], outputs=[TxOutput(value=1, token_data=0, script='dqkUp+HdqawCdjC61o+vrDbEpBOVOmuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HMpoopGYaybBqTotndHfdYxcZBgaqBZbuJ', timelock=None))], parents=[nc2.hash_hex, '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash=nc3.hash_hex, spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[nc4.hash_hex], twins=[], accumulated_weight=18.081842278001805, score=0.0, accumulated_weight_raw='277445', score_raw='0', first_block=None, height=0, validation='full')), group_id=None), latest_event_id=37, stream_id=stream_id), # noqa: E501 + # nc4 + its spent output being updated + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=22, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash=nc4.hash_hex, nonce=0, timestamp=1572636381, signal_bits=0, version=4, weight=17.853538332406874, inputs=[TxInput(tx_id=dummy.hash_hex, index=3, spent_output=TxOutput(value=1, token_data=0, script='dqkUSs2wh6dehIfZfwWtUCkvVL6mm8eIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HDLeo6r8wB8schqaMxrLP1HrrpBVZeRfWY', timelock=None)))], outputs=[TxOutput(value=1, token_data=0, script='dqkUzlvBv/iaHLPIkMzNTBRGei+uvVyIrA==', decoded=DecodedTxOutput(type='P2PKH', 
address='HRLFWEm9bosfc7Z4zZ7YsXACZTmmdA6nZd', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nc3.hash_hex], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash=nc4.hash_hex, spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=17.85354105039213, score=0.0, accumulated_weight_raw='236838', score_raw='0', first_block=None, height=0, validation='full')), group_id=None), latest_event_id=37, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=23, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash=dummy.hash_hex, nonce=0, timestamp=1572636377, signal_bits=0, version=1, weight=21.777639275691804, inputs=[TxInput(tx_id='2ebb3b8edcb72a7e46cc0efacfe1b109e2e9dd868a90fe0906968dc8fbbf6488', index=0, spent_output=TxOutput(value=100000000000, token_data=0, script='dqkU0HvILW4NG7EWYUB2ZF6bh8jIO0GIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRXVDmLVdq8pgok1BCUKpiFWdAVAy4a5AJ', timelock=None)))], outputs=[TxOutput(value=1, token_data=0, script='dqkU80nn8+AtzngP5Dna6XSwXuuCC/eIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HUhX83RJxSmUoffvmTNsLa4kAf33Y7GtuA', timelock=None)), TxOutput(value=1, token_data=0, script='dqkUf5BkCh0suqADvCnN6YjtldkKnueIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HJ9dB9tKwFs8MGvcgdSpZZbVXWxpFZ9tBs', timelock=None)), TxOutput(value=1, token_data=0, script='dqkUywjnDVLCo2DvKimDErqN97qgd+SIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HR2gDr1CEhH8oaBPdjgMddzcxiESgmPdPQ', timelock=None)), TxOutput(value=1, token_data=0, script='dqkUSs2wh6dehIfZfwWtUCkvVL6mm8eIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HDLeo6r8wB8schqaMxrLP1HrrpBVZeRfWY', timelock=None)), TxOutput(value=99999999996, token_data=0, script='dqkUH3DCvvDLG1Vgxw9QKnqEFygPQbKIrA==', decoded=DecodedTxOutput(type='P2PKH', 
address='H9PNR3V6vdzSr7t394NGWB5uabCuhtN1LV', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash=dummy.hash_hex, spent_outputs=[SpentOutput(index=0, tx_ids=[nc1.hash_hex]), SpentOutput(index=1, tx_ids=[nc2.hash_hex]), SpentOutput(index=2, tx_ids=[nc3.hash_hex]), SpentOutput(index=3, tx_ids=[nc4.hash_hex])], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=21.777639395103126, score=0.0, accumulated_weight_raw='3595197', score_raw='0', first_block=b2.hash_hex, height=0, validation='full')), group_id=None), latest_event_id=37, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=24, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash=nc4.hash_hex, nonce=0, timestamp=1572636381, signal_bits=0, version=4, weight=17.853538332406874, inputs=[TxInput(tx_id=dummy.hash_hex, index=3, spent_output=TxOutput(value=1, token_data=0, script='dqkUSs2wh6dehIfZfwWtUCkvVL6mm8eIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HDLeo6r8wB8schqaMxrLP1HrrpBVZeRfWY', timelock=None)))], outputs=[TxOutput(value=1, token_data=0, script='dqkUzlvBv/iaHLPIkMzNTBRGei+uvVyIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRLFWEm9bosfc7Z4zZ7YsXACZTmmdA6nZd', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nc3.hash_hex], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash=nc4.hash_hex, spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=17.85354105039213, score=0.0, accumulated_weight_raw='236838', score_raw='0', first_block=None, height=0, validation='full')), group_id=None), latest_event_id=37, stream_id=stream_id), # noqa: E501 + # 
b2 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=25, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash=b2.hash_hex, nonce=0, timestamp=1572636410, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUJ06XQc3Dh7A9qUwUwMzxnljQX0eIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HA6xtQwPwLUceMNhQe8VVxdc3sT6yx8re6', timelock=None))], parents=['76d8bb18b24173fe72898faf2a09606b09af78a22cb3a087cd88f2faed67c0f6', nc2.hash_hex, '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash=b2.hash_hex, spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=21.960208086548366, accumulated_weight_raw='4', score_raw='4080199', first_block=None, height=2, validation='full')), group_id=None), latest_event_id=37, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=26, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash=nc2.hash_hex, nonce=0, timestamp=1572636379, signal_bits=0, version=4, weight=17.893191293894695, inputs=[TxInput(tx_id=dummy.hash_hex, index=1, spent_output=TxOutput(value=1, token_data=0, script='dqkUf5BkCh0suqADvCnN6YjtldkKnueIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HJ9dB9tKwFs8MGvcgdSpZZbVXWxpFZ9tBs', timelock=None)))], outputs=[TxOutput(value=1, token_data=0, script='dqkUoFBWk4Dv5WGbe4jvvVKoLI/iAMeIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HM8nk2q74k3nLh4FnUWfCgyuT165bmHyvu', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nc1.hash_hex], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash=nc2.hash_hex, spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[nc3.hash_hex, 
b2.hash_hex], twins=[], accumulated_weight=17.893188934434693, score=0.0, accumulated_weight_raw='243437', score_raw='0', first_block=b2.hash_hex, height=0, validation='full')), group_id=None), latest_event_id=37, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=27, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash=nc1.hash_hex, nonce=0, timestamp=1572636378, signal_bits=0, version=4, weight=17.8819312127108, inputs=[TxInput(tx_id=dummy.hash_hex, index=0, spent_output=TxOutput(value=1, token_data=0, script='dqkU80nn8+AtzngP5Dna6XSwXuuCC/eIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HUhX83RJxSmUoffvmTNsLa4kAf33Y7GtuA', timelock=None)))], outputs=[TxOutput(value=1, token_data=0, script='dqkUuREy3ZBqH13akgzR38nfAl3QLryIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPPfzpTiAfyj6gsbHG7FQsYUbVXaSg9MQK', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash=nc1.hash_hex, spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[nc2.hash_hex], twins=[], accumulated_weight=17.881932463618046, score=0.0, accumulated_weight_raw='241545', score_raw='0', first_block=b2.hash_hex, height=0, validation='full')), group_id=None), latest_event_id=37, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=28, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash=dummy.hash_hex, nonce=0, timestamp=1572636377, signal_bits=0, version=1, weight=21.777639275691804, inputs=[TxInput(tx_id='2ebb3b8edcb72a7e46cc0efacfe1b109e2e9dd868a90fe0906968dc8fbbf6488', index=0, spent_output=TxOutput(value=100000000000, token_data=0, script='dqkU0HvILW4NG7EWYUB2ZF6bh8jIO0GIrA==', 
decoded=DecodedTxOutput(type='P2PKH', address='HRXVDmLVdq8pgok1BCUKpiFWdAVAy4a5AJ', timelock=None)))], outputs=[TxOutput(value=1, token_data=0, script='dqkU80nn8+AtzngP5Dna6XSwXuuCC/eIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HUhX83RJxSmUoffvmTNsLa4kAf33Y7GtuA', timelock=None)), TxOutput(value=1, token_data=0, script='dqkUf5BkCh0suqADvCnN6YjtldkKnueIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HJ9dB9tKwFs8MGvcgdSpZZbVXWxpFZ9tBs', timelock=None)), TxOutput(value=1, token_data=0, script='dqkUywjnDVLCo2DvKimDErqN97qgd+SIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HR2gDr1CEhH8oaBPdjgMddzcxiESgmPdPQ', timelock=None)), TxOutput(value=1, token_data=0, script='dqkUSs2wh6dehIfZfwWtUCkvVL6mm8eIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HDLeo6r8wB8schqaMxrLP1HrrpBVZeRfWY', timelock=None)), TxOutput(value=99999999996, token_data=0, script='dqkUH3DCvvDLG1Vgxw9QKnqEFygPQbKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H9PNR3V6vdzSr7t394NGWB5uabCuhtN1LV', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash=dummy.hash_hex, spent_outputs=[SpentOutput(index=0, tx_ids=[nc1.hash_hex]), SpentOutput(index=1, tx_ids=[nc2.hash_hex]), SpentOutput(index=2, tx_ids=[nc3.hash_hex]), SpentOutput(index=3, tx_ids=[nc4.hash_hex])], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=21.777639395103126, score=0.0, accumulated_weight_raw='3595197', score_raw='0', first_block=b2.hash_hex, height=0, validation='full')), group_id=None), latest_event_id=37, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=29, timestamp=0, type=EventType.NC_EVENT, data=NCEventData(vertex_id=nc1.hash_hex, nc_id=nc1.hash_hex, nc_execution='success', 
first_block=b2.hash_hex, data_hex=b'test event on initialize 1'.hex()), group_id=None), latest_event_id=37, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=30, timestamp=0, type=EventType.NC_EVENT, data=NCEventData(vertex_id=nc2.hash_hex, nc_id=nc1.hash_hex, nc_execution='success', first_block=b2.hash_hex, data_hex=b'test event on initialize 2'.hex()), group_id=None), latest_event_id=37, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=31, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash=b2.hash_hex, nonce=0, timestamp=1572636410, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUJ06XQc3Dh7A9qUwUwMzxnljQX0eIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HA6xtQwPwLUceMNhQe8VVxdc3sT6yx8re6', timelock=None))], parents=['76d8bb18b24173fe72898faf2a09606b09af78a22cb3a087cd88f2faed67c0f6', nc2.hash_hex, '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash=b2.hash_hex, spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=21.960208086548366, accumulated_weight_raw='4', score_raw='4080199', first_block=None, height=2, validation='full')), group_id=None), latest_event_id=37, stream_id=stream_id), # noqa: E501 + # b3 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=32, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash=nc4.hash_hex, nonce=0, timestamp=1572636381, signal_bits=0, version=4, weight=17.853538332406874, inputs=[TxInput(tx_id=dummy.hash_hex, index=3, spent_output=TxOutput(value=1, token_data=0, script='dqkUSs2wh6dehIfZfwWtUCkvVL6mm8eIrA==', decoded=DecodedTxOutput(type='P2PKH', 
address='HDLeo6r8wB8schqaMxrLP1HrrpBVZeRfWY', timelock=None)))], outputs=[TxOutput(value=1, token_data=0, script='dqkUzlvBv/iaHLPIkMzNTBRGei+uvVyIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRLFWEm9bosfc7Z4zZ7YsXACZTmmdA6nZd', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nc3.hash_hex], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash=nc4.hash_hex, spent_outputs=[], conflict_with=[], voided_by=[nc4.hash_hex, '6e632d6661696c'], received_by=[], children=[b3.hash_hex], twins=[], accumulated_weight=17.85354105039213, score=0.0, accumulated_weight_raw='236838', score_raw='0', first_block=b3.hash_hex, height=0, validation='full')), group_id=None), latest_event_id=37, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=33, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash=b3.hash_hex, nonce=0, timestamp=1572636441, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUedvINGyDuDTh8tywE28Gba3mgH+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HHdTSHDJni47grVGYs4q3pi81qTEfDHL6S', timelock=None))], parents=[b2.hash_hex, nc2.hash_hex, nc4.hash_hex], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash=b3.hash_hex, spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=22.131472040908342, accumulated_weight_raw='4', score_raw='4594486', first_block=None, height=3, validation='full')), group_id=None), latest_event_id=37, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=34, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash=nc3.hash_hex, nonce=0, timestamp=1572636380, signal_bits=0, version=4, weight=18.081839799876487, 
inputs=[TxInput(tx_id=dummy.hash_hex, index=2, spent_output=TxOutput(value=1, token_data=0, script='dqkUywjnDVLCo2DvKimDErqN97qgd+SIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HR2gDr1CEhH8oaBPdjgMddzcxiESgmPdPQ', timelock=None)))], outputs=[TxOutput(value=1, token_data=0, script='dqkUp+HdqawCdjC61o+vrDbEpBOVOmuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HMpoopGYaybBqTotndHfdYxcZBgaqBZbuJ', timelock=None))], parents=[nc2.hash_hex, '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash=nc3.hash_hex, spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[nc4.hash_hex], twins=[], accumulated_weight=18.081842278001805, score=0.0, accumulated_weight_raw='277445', score_raw='0', first_block=b3.hash_hex, height=0, validation='full')), group_id=None), latest_event_id=37, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=35, timestamp=0, type=EventType.NC_EVENT, data=NCEventData(vertex_id=nc3.hash_hex, nc_id=nc2.hash_hex, nc_execution='success', first_block=b3.hash_hex, data_hex=b'test event on call_another'.hex()), group_id=None), latest_event_id=37, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=36, timestamp=0, type=EventType.NC_EVENT, data=NCEventData(vertex_id=nc3.hash_hex, nc_id=nc1.hash_hex, nc_execution='success', first_block=b3.hash_hex, data_hex=b'test event on some_method'.hex()), group_id=None), latest_event_id=37, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=37, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash=b3.hash_hex, nonce=0, timestamp=1572636441, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, 
script='dqkUedvINGyDuDTh8tywE28Gba3mgH+IrA==', decoded=DecodedTxOutput(type='P2PKH', address='HHdTSHDJni47grVGYs4q3pi81qTEfDHL6S', timelock=None))], parents=[b2.hash_hex, nc2.hash_hex, nc4.hash_hex], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash=b3.hash_hex, spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=22.131472040908342, accumulated_weight_raw='4', score_raw='4594486', first_block=None, height=3, validation='full')), group_id=None), latest_event_id=37, stream_id=stream_id), # noqa: E501 + ] + + self.assert_response_equal(responses, expected) + + def test_nc_events_reorg(self) -> None: + stream_id = self.manager._event_manager._stream_id + assert stream_id is not None + artifacts = Scenario.NC_EVENTS_REORG.simulate(self.simulator, self.manager) + assert artifacts is not None + self._start_stream() + + responses = self._get_success_responses() + + expected = [ + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=0, timestamp=0, type=EventType.LOAD_STARTED, data=EmptyData(), group_id=None), latest_event_id=48, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=1, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='2ebb3b8edcb72a7e46cc0efacfe1b109e2e9dd868a90fe0906968dc8fbbf6488', nonce=5, timestamp=1572636343, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU0HvILW4NG7EWYUB2ZF6bh8jIO0GIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRXVDmLVdq8pgok1BCUKpiFWdAVAy4a5AJ', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='2ebb3b8edcb72a7e46cc0efacfe1b109e2e9dd868a90fe0906968dc8fbbf6488', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], 
accumulated_weight=2.0, score=2.0, accumulated_weight_raw='4', score_raw='4', first_block=None, height=0, validation='full', nc_execution=None)), group_id=None), latest_event_id=48, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=2, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw='4', score_raw='4', first_block=None, height=0, validation='full', nc_execution=None)), group_id=None), latest_event_id=48, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=3, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw='4', score_raw='4', first_block=None, height=0, validation='full', nc_execution=None)), group_id=None), latest_event_id=48, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=4, timestamp=0, type=EventType.LOAD_FINISHED, data=EmptyData(), group_id=None), latest_event_id=48, 
stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=5, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='76d8bb18b24173fe72898faf2a09606b09af78a22cb3a087cd88f2faed67c0f6', nonce=0, timestamp=1572636376, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPzq5l2BXiW8jWwfbfvsGUb10AyiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HCHTJiEmKp2E21AJdpfWz3NxHMmtNQRroi', timelock=None))], parents=['2ebb3b8edcb72a7e46cc0efacfe1b109e2e9dd868a90fe0906968dc8fbbf6488', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='76d8bb18b24173fe72898faf2a09606b09af78a22cb3a087cd88f2faed67c0f6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f3201793cb10fb5d3821773f169d33934a4127c5636128bf07c2e266a2ca3e5f', 'd1fa99db1041855845d4bbf76603b124190aa19fd2e65e689faa4062497f4b7c'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw='4', score_raw='16', first_block=None, height=1, validation='full', nc_execution=None)), group_id=None), latest_event_id=48, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=6, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', nonce=2, timestamp=1572636345, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['76d8bb18b24173fe72898faf2a09606b09af78a22cb3a087cd88f2faed67c0f6', 
'26c218df92c9d6410c9b9a7093a35cb046f959e12dc8727b7cc6671c78177dea', '94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw='4', score_raw='4', first_block='76d8bb18b24173fe72898faf2a09606b09af78a22cb3a087cd88f2faed67c0f6', height=0, validation='full', nc_execution=None)), group_id=None), latest_event_id=48, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=7, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', nonce=6, timestamp=1572636344, signal_bits=0, version=1, weight=2.0, inputs=[], outputs=[], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['76d8bb18b24173fe72898faf2a09606b09af78a22cb3a087cd88f2faed67c0f6', '26c218df92c9d6410c9b9a7093a35cb046f959e12dc8727b7cc6671c78177dea', '94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c', 'f3201793cb10fb5d3821773f169d33934a4127c5636128bf07c2e266a2ca3e5f', 'd1fa99db1041855845d4bbf76603b124190aa19fd2e65e689faa4062497f4b7c', '385811b93c2926cc9e599f2ea9f482af98def3e0b394d4e6cb6a151f4a5127ad', 'a67ca8b6c423b08d06f61e5aae8b58ae00e028b8ae914b9c7943f27f01749842', 'be9f0c15423b88ddb7b0a4ef4c86f252b4d4e179f4e9ae357775766fca7226c1'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw='4', score_raw='4', first_block='76d8bb18b24173fe72898faf2a09606b09af78a22cb3a087cd88f2faed67c0f6', height=0, validation='full', nc_execution=None)), group_id=None), latest_event_id=48, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=8, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, 
data=TxData(hash='2ebb3b8edcb72a7e46cc0efacfe1b109e2e9dd868a90fe0906968dc8fbbf6488', nonce=5, timestamp=1572636343, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU0HvILW4NG7EWYUB2ZF6bh8jIO0GIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRXVDmLVdq8pgok1BCUKpiFWdAVAy4a5AJ', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='2ebb3b8edcb72a7e46cc0efacfe1b109e2e9dd868a90fe0906968dc8fbbf6488', spent_outputs=[SpentOutput(index=0, tx_ids=['26c218df92c9d6410c9b9a7093a35cb046f959e12dc8727b7cc6671c78177dea'])], conflict_with=[], voided_by=[], received_by=[], children=['76d8bb18b24173fe72898faf2a09606b09af78a22cb3a087cd88f2faed67c0f6'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw='4', score_raw='4', first_block=None, height=0, validation='full', nc_execution=None)), group_id=None), latest_event_id=48, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=9, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='76d8bb18b24173fe72898faf2a09606b09af78a22cb3a087cd88f2faed67c0f6', nonce=0, timestamp=1572636376, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUPzq5l2BXiW8jWwfbfvsGUb10AyiIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HCHTJiEmKp2E21AJdpfWz3NxHMmtNQRroi', timelock=None))], parents=['2ebb3b8edcb72a7e46cc0efacfe1b109e2e9dd868a90fe0906968dc8fbbf6488', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='76d8bb18b24173fe72898faf2a09606b09af78a22cb3a087cd88f2faed67c0f6', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], 
children=['f3201793cb10fb5d3821773f169d33934a4127c5636128bf07c2e266a2ca3e5f', 'd1fa99db1041855845d4bbf76603b124190aa19fd2e65e689faa4062497f4b7c'], twins=[], accumulated_weight=2.0, score=4.0, accumulated_weight_raw='4', score_raw='16', first_block=None, height=1, validation='full', nc_execution=None)), group_id=None), latest_event_id=48, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=10, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='26c218df92c9d6410c9b9a7093a35cb046f959e12dc8727b7cc6671c78177dea', nonce=0, timestamp=1572636377, signal_bits=0, version=1, weight=21.119475351738224, inputs=[TxInput(tx_id='2ebb3b8edcb72a7e46cc0efacfe1b109e2e9dd868a90fe0906968dc8fbbf6488', index=0, spent_output=TxOutput(value=100000000000, token_data=0, script='dqkU0HvILW4NG7EWYUB2ZF6bh8jIO0GIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRXVDmLVdq8pgok1BCUKpiFWdAVAy4a5AJ', timelock=None)))], outputs=[TxOutput(value=1, token_data=0, script='dqkU80nn8+AtzngP5Dna6XSwXuuCC/eIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HUhX83RJxSmUoffvmTNsLa4kAf33Y7GtuA', timelock=None)), TxOutput(value=99999999999, token_data=0, script='dqkUf5BkCh0suqADvCnN6YjtldkKnueIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HJ9dB9tKwFs8MGvcgdSpZZbVXWxpFZ9tBs', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='26c218df92c9d6410c9b9a7093a35cb046f959e12dc8727b7cc6671c78177dea', spent_outputs=[SpentOutput(index=0, tx_ids=['94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c'])], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=21.11947563927789, score=0.0, accumulated_weight_raw='2278220', score_raw='0', 
first_block='f3201793cb10fb5d3821773f169d33934a4127c5636128bf07c2e266a2ca3e5f', height=0, validation='full', nc_execution=None)), group_id=None), latest_event_id=48, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=11, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='2ebb3b8edcb72a7e46cc0efacfe1b109e2e9dd868a90fe0906968dc8fbbf6488', nonce=5, timestamp=1572636343, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=100000000000, token_data=0, script='dqkU0HvILW4NG7EWYUB2ZF6bh8jIO0GIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRXVDmLVdq8pgok1BCUKpiFWdAVAy4a5AJ', timelock=None))], parents=[], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='2ebb3b8edcb72a7e46cc0efacfe1b109e2e9dd868a90fe0906968dc8fbbf6488', spent_outputs=[SpentOutput(index=0, tx_ids=['26c218df92c9d6410c9b9a7093a35cb046f959e12dc8727b7cc6671c78177dea'])], conflict_with=[], voided_by=[], received_by=[], children=['76d8bb18b24173fe72898faf2a09606b09af78a22cb3a087cd88f2faed67c0f6'], twins=[], accumulated_weight=2.0, score=2.0, accumulated_weight_raw='4', score_raw='4', first_block=None, height=0, validation='full', nc_execution=None)), group_id=None), latest_event_id=48, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=12, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='26c218df92c9d6410c9b9a7093a35cb046f959e12dc8727b7cc6671c78177dea', nonce=0, timestamp=1572636377, signal_bits=0, version=1, weight=21.119475351738224, inputs=[TxInput(tx_id='2ebb3b8edcb72a7e46cc0efacfe1b109e2e9dd868a90fe0906968dc8fbbf6488', index=0, spent_output=TxOutput(value=100000000000, token_data=0, script='dqkU0HvILW4NG7EWYUB2ZF6bh8jIO0GIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRXVDmLVdq8pgok1BCUKpiFWdAVAy4a5AJ', timelock=None)))], outputs=[TxOutput(value=1, 
token_data=0, script='dqkU80nn8+AtzngP5Dna6XSwXuuCC/eIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HUhX83RJxSmUoffvmTNsLa4kAf33Y7GtuA', timelock=None)), TxOutput(value=99999999999, token_data=0, script='dqkUf5BkCh0suqADvCnN6YjtldkKnueIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HJ9dB9tKwFs8MGvcgdSpZZbVXWxpFZ9tBs', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='26c218df92c9d6410c9b9a7093a35cb046f959e12dc8727b7cc6671c78177dea', spent_outputs=[SpentOutput(index=0, tx_ids=['94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c'])], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=21.11947563927789, score=0.0, accumulated_weight_raw='2278220', score_raw='0', first_block='f3201793cb10fb5d3821773f169d33934a4127c5636128bf07c2e266a2ca3e5f', height=0, validation='full', nc_execution=None)), group_id=None), latest_event_id=48, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=13, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c', nonce=0, timestamp=1572636378, signal_bits=0, version=1, weight=17.90997909200292, inputs=[TxInput(tx_id='26c218df92c9d6410c9b9a7093a35cb046f959e12dc8727b7cc6671c78177dea', index=0, spent_output=TxOutput(value=1, token_data=0, script='dqkU80nn8+AtzngP5Dna6XSwXuuCC/eIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HUhX83RJxSmUoffvmTNsLa4kAf33Y7GtuA', timelock=None)))], outputs=[TxOutput(value=1, token_data=0, script='dqkUywjnDVLCo2DvKimDErqN97qgd+SIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HR2gDr1CEhH8oaBPdjgMddzcxiESgmPdPQ', timelock=None))], 
parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f3201793cb10fb5d3821773f169d33934a4127c5636128bf07c2e266a2ca3e5f', 'd1fa99db1041855845d4bbf76603b124190aa19fd2e65e689faa4062497f4b7c', '385811b93c2926cc9e599f2ea9f482af98def3e0b394d4e6cb6a151f4a5127ad', 'a67ca8b6c423b08d06f61e5aae8b58ae00e028b8ae914b9c7943f27f01749842', 'be9f0c15423b88ddb7b0a4ef4c86f252b4d4e179f4e9ae357775766fca7226c1'], twins=[], accumulated_weight=17.909980953144554, score=0.0, accumulated_weight_raw='246287', score_raw='0', first_block='f3201793cb10fb5d3821773f169d33934a4127c5636128bf07c2e266a2ca3e5f', height=0, validation='full', nc_execution='success')), group_id=None), latest_event_id=48, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=14, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='26c218df92c9d6410c9b9a7093a35cb046f959e12dc8727b7cc6671c78177dea', nonce=0, timestamp=1572636377, signal_bits=0, version=1, weight=21.119475351738224, inputs=[TxInput(tx_id='2ebb3b8edcb72a7e46cc0efacfe1b109e2e9dd868a90fe0906968dc8fbbf6488', index=0, spent_output=TxOutput(value=100000000000, token_data=0, script='dqkU0HvILW4NG7EWYUB2ZF6bh8jIO0GIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRXVDmLVdq8pgok1BCUKpiFWdAVAy4a5AJ', timelock=None)))], outputs=[TxOutput(value=1, token_data=0, script='dqkU80nn8+AtzngP5Dna6XSwXuuCC/eIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HUhX83RJxSmUoffvmTNsLa4kAf33Y7GtuA', timelock=None)), TxOutput(value=99999999999, token_data=0, script='dqkUf5BkCh0suqADvCnN6YjtldkKnueIrA==', decoded=DecodedTxOutput(type='P2PKH', 
address='HJ9dB9tKwFs8MGvcgdSpZZbVXWxpFZ9tBs', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='26c218df92c9d6410c9b9a7093a35cb046f959e12dc8727b7cc6671c78177dea', spent_outputs=[SpentOutput(index=0, tx_ids=['94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c'])], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=21.11947563927789, score=0.0, accumulated_weight_raw='2278220', score_raw='0', first_block='f3201793cb10fb5d3821773f169d33934a4127c5636128bf07c2e266a2ca3e5f', height=0, validation='full', nc_execution=None)), group_id=None), latest_event_id=48, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=15, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c', nonce=0, timestamp=1572636378, signal_bits=0, version=1, weight=17.90997909200292, inputs=[TxInput(tx_id='26c218df92c9d6410c9b9a7093a35cb046f959e12dc8727b7cc6671c78177dea', index=0, spent_output=TxOutput(value=1, token_data=0, script='dqkU80nn8+AtzngP5Dna6XSwXuuCC/eIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HUhX83RJxSmUoffvmTNsLa4kAf33Y7GtuA', timelock=None)))], outputs=[TxOutput(value=1, token_data=0, script='dqkUywjnDVLCo2DvKimDErqN97qgd+SIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HR2gDr1CEhH8oaBPdjgMddzcxiESgmPdPQ', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c', spent_outputs=[], conflict_with=[], voided_by=[], 
received_by=[], children=['f3201793cb10fb5d3821773f169d33934a4127c5636128bf07c2e266a2ca3e5f', 'd1fa99db1041855845d4bbf76603b124190aa19fd2e65e689faa4062497f4b7c', '385811b93c2926cc9e599f2ea9f482af98def3e0b394d4e6cb6a151f4a5127ad', 'a67ca8b6c423b08d06f61e5aae8b58ae00e028b8ae914b9c7943f27f01749842', 'be9f0c15423b88ddb7b0a4ef4c86f252b4d4e179f4e9ae357775766fca7226c1'], twins=[], accumulated_weight=17.909980953144554, score=0.0, accumulated_weight_raw='246287', score_raw='0', first_block='f3201793cb10fb5d3821773f169d33934a4127c5636128bf07c2e266a2ca3e5f', height=0, validation='full', nc_execution='success')), group_id=None), latest_event_id=48, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=16, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='f3201793cb10fb5d3821773f169d33934a4127c5636128bf07c2e266a2ca3e5f', nonce=0, timestamp=1572636409, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUSs2wh6dehIfZfwWtUCkvVL6mm8eIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HDLeo6r8wB8schqaMxrLP1HrrpBVZeRfWY', timelock=None))], parents=['76d8bb18b24173fe72898faf2a09606b09af78a22cb3a087cd88f2faed67c0f6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='f3201793cb10fb5d3821773f169d33934a4127c5636128bf07c2e266a2ca3e5f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['a67ca8b6c423b08d06f61e5aae8b58ae00e028b8ae914b9c7943f27f01749842'], twins=[], accumulated_weight=2.0, score=21.267581676524166, accumulated_weight_raw='4', score_raw='2524527', first_block=None, height=2, validation='full', nc_execution=None)), group_id=None), latest_event_id=48, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', 
peer_id=self.peer_id, network='unittests', event=BaseEvent(id=17, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c', nonce=0, timestamp=1572636378, signal_bits=0, version=1, weight=17.90997909200292, inputs=[TxInput(tx_id='26c218df92c9d6410c9b9a7093a35cb046f959e12dc8727b7cc6671c78177dea', index=0, spent_output=TxOutput(value=1, token_data=0, script='dqkU80nn8+AtzngP5Dna6XSwXuuCC/eIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HUhX83RJxSmUoffvmTNsLa4kAf33Y7GtuA', timelock=None)))], outputs=[TxOutput(value=1, token_data=0, script='dqkUywjnDVLCo2DvKimDErqN97qgd+SIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HR2gDr1CEhH8oaBPdjgMddzcxiESgmPdPQ', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f3201793cb10fb5d3821773f169d33934a4127c5636128bf07c2e266a2ca3e5f', 'd1fa99db1041855845d4bbf76603b124190aa19fd2e65e689faa4062497f4b7c', '385811b93c2926cc9e599f2ea9f482af98def3e0b394d4e6cb6a151f4a5127ad', 'a67ca8b6c423b08d06f61e5aae8b58ae00e028b8ae914b9c7943f27f01749842', 'be9f0c15423b88ddb7b0a4ef4c86f252b4d4e179f4e9ae357775766fca7226c1'], twins=[], accumulated_weight=17.909980953144554, score=0.0, accumulated_weight_raw='246287', score_raw='0', first_block='f3201793cb10fb5d3821773f169d33934a4127c5636128bf07c2e266a2ca3e5f', height=0, validation='full', nc_execution='success')), group_id=None), latest_event_id=48, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=18, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, 
data=TxData(hash='26c218df92c9d6410c9b9a7093a35cb046f959e12dc8727b7cc6671c78177dea', nonce=0, timestamp=1572636377, signal_bits=0, version=1, weight=21.119475351738224, inputs=[TxInput(tx_id='2ebb3b8edcb72a7e46cc0efacfe1b109e2e9dd868a90fe0906968dc8fbbf6488', index=0, spent_output=TxOutput(value=100000000000, token_data=0, script='dqkU0HvILW4NG7EWYUB2ZF6bh8jIO0GIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRXVDmLVdq8pgok1BCUKpiFWdAVAy4a5AJ', timelock=None)))], outputs=[TxOutput(value=1, token_data=0, script='dqkU80nn8+AtzngP5Dna6XSwXuuCC/eIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HUhX83RJxSmUoffvmTNsLa4kAf33Y7GtuA', timelock=None)), TxOutput(value=99999999999, token_data=0, script='dqkUf5BkCh0suqADvCnN6YjtldkKnueIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HJ9dB9tKwFs8MGvcgdSpZZbVXWxpFZ9tBs', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='26c218df92c9d6410c9b9a7093a35cb046f959e12dc8727b7cc6671c78177dea', spent_outputs=[SpentOutput(index=0, tx_ids=['94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c'])], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=21.11947563927789, score=0.0, accumulated_weight_raw='2278220', score_raw='0', first_block='f3201793cb10fb5d3821773f169d33934a4127c5636128bf07c2e266a2ca3e5f', height=0, validation='full', nc_execution=None)), group_id=None), latest_event_id=48, stream_id=stream_id), # noqa: E501 + # nc1 is executed for the first time + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=19, timestamp=0, type=EventType.NC_EVENT, data=NCEventData(vertex_id='94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c', nc_id='94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c', 
nc_execution='success', first_block='f3201793cb10fb5d3821773f169d33934a4127c5636128bf07c2e266a2ca3e5f', data_hex=b'test event on initialize 1'.hex()), group_id=None), latest_event_id=48, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=20, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='f3201793cb10fb5d3821773f169d33934a4127c5636128bf07c2e266a2ca3e5f', nonce=0, timestamp=1572636409, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUSs2wh6dehIfZfwWtUCkvVL6mm8eIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HDLeo6r8wB8schqaMxrLP1HrrpBVZeRfWY', timelock=None))], parents=['76d8bb18b24173fe72898faf2a09606b09af78a22cb3a087cd88f2faed67c0f6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='f3201793cb10fb5d3821773f169d33934a4127c5636128bf07c2e266a2ca3e5f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['a67ca8b6c423b08d06f61e5aae8b58ae00e028b8ae914b9c7943f27f01749842'], twins=[], accumulated_weight=2.0, score=21.267581676524166, accumulated_weight_raw='4', score_raw='2524527', first_block=None, height=2, validation='full', nc_execution=None)), group_id=None), latest_event_id=48, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=21, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='d1fa99db1041855845d4bbf76603b124190aa19fd2e65e689faa4062497f4b7c', nonce=0, timestamp=1572636440, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUH3DCvvDLG1Vgxw9QKnqEFygPQbKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H9PNR3V6vdzSr7t394NGWB5uabCuhtN1LV', timelock=None))], 
parents=['76d8bb18b24173fe72898faf2a09606b09af78a22cb3a087cd88f2faed67c0f6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='d1fa99db1041855845d4bbf76603b124190aa19fd2e65e689faa4062497f4b7c', spent_outputs=[], conflict_with=[], voided_by=['d1fa99db1041855845d4bbf76603b124190aa19fd2e65e689faa4062497f4b7c'], received_by=[], children=['385811b93c2926cc9e599f2ea9f482af98def3e0b394d4e6cb6a151f4a5127ad'], twins=[], accumulated_weight=2.0, score=21.267581676524166, accumulated_weight_raw='4', score_raw='2524527', first_block=None, height=2, validation='full', nc_execution=None)), group_id=None), latest_event_id=48, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=22, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='f3201793cb10fb5d3821773f169d33934a4127c5636128bf07c2e266a2ca3e5f', nonce=0, timestamp=1572636409, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUSs2wh6dehIfZfwWtUCkvVL6mm8eIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HDLeo6r8wB8schqaMxrLP1HrrpBVZeRfWY', timelock=None))], parents=['76d8bb18b24173fe72898faf2a09606b09af78a22cb3a087cd88f2faed67c0f6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='f3201793cb10fb5d3821773f169d33934a4127c5636128bf07c2e266a2ca3e5f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['a67ca8b6c423b08d06f61e5aae8b58ae00e028b8ae914b9c7943f27f01749842'], twins=[], accumulated_weight=2.0, score=21.267581676524166, accumulated_weight_raw='4', score_raw='2524527', first_block=None, height=2, 
validation='full', nc_execution=None)), group_id=None), latest_event_id=48, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=23, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c', nonce=0, timestamp=1572636378, signal_bits=0, version=1, weight=17.90997909200292, inputs=[TxInput(tx_id='26c218df92c9d6410c9b9a7093a35cb046f959e12dc8727b7cc6671c78177dea', index=0, spent_output=TxOutput(value=1, token_data=0, script='dqkU80nn8+AtzngP5Dna6XSwXuuCC/eIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HUhX83RJxSmUoffvmTNsLa4kAf33Y7GtuA', timelock=None)))], outputs=[TxOutput(value=1, token_data=0, script='dqkUywjnDVLCo2DvKimDErqN97qgd+SIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HR2gDr1CEhH8oaBPdjgMddzcxiESgmPdPQ', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f3201793cb10fb5d3821773f169d33934a4127c5636128bf07c2e266a2ca3e5f', 'd1fa99db1041855845d4bbf76603b124190aa19fd2e65e689faa4062497f4b7c', '385811b93c2926cc9e599f2ea9f482af98def3e0b394d4e6cb6a151f4a5127ad', 'a67ca8b6c423b08d06f61e5aae8b58ae00e028b8ae914b9c7943f27f01749842', 'be9f0c15423b88ddb7b0a4ef4c86f252b4d4e179f4e9ae357775766fca7226c1'], twins=[], accumulated_weight=17.909980953144554, score=0.0, accumulated_weight_raw='246287', score_raw='0', first_block='f3201793cb10fb5d3821773f169d33934a4127c5636128bf07c2e266a2ca3e5f', height=0, validation='full', nc_execution='success')), group_id=None), latest_event_id=48, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, 
network='unittests', event=BaseEvent(id=24, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='26c218df92c9d6410c9b9a7093a35cb046f959e12dc8727b7cc6671c78177dea', nonce=0, timestamp=1572636377, signal_bits=0, version=1, weight=21.119475351738224, inputs=[TxInput(tx_id='2ebb3b8edcb72a7e46cc0efacfe1b109e2e9dd868a90fe0906968dc8fbbf6488', index=0, spent_output=TxOutput(value=100000000000, token_data=0, script='dqkU0HvILW4NG7EWYUB2ZF6bh8jIO0GIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRXVDmLVdq8pgok1BCUKpiFWdAVAy4a5AJ', timelock=None)))], outputs=[TxOutput(value=1, token_data=0, script='dqkU80nn8+AtzngP5Dna6XSwXuuCC/eIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HUhX83RJxSmUoffvmTNsLa4kAf33Y7GtuA', timelock=None)), TxOutput(value=99999999999, token_data=0, script='dqkUf5BkCh0suqADvCnN6YjtldkKnueIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HJ9dB9tKwFs8MGvcgdSpZZbVXWxpFZ9tBs', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='26c218df92c9d6410c9b9a7093a35cb046f959e12dc8727b7cc6671c78177dea', spent_outputs=[SpentOutput(index=0, tx_ids=['94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c'])], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=21.11947563927789, score=0.0, accumulated_weight_raw='2278220', score_raw='0', first_block='f3201793cb10fb5d3821773f169d33934a4127c5636128bf07c2e266a2ca3e5f', height=0, validation='full', nc_execution=None)), group_id=None), latest_event_id=48, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=25, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='d1fa99db1041855845d4bbf76603b124190aa19fd2e65e689faa4062497f4b7c', nonce=0, timestamp=1572636440, 
signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUH3DCvvDLG1Vgxw9QKnqEFygPQbKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H9PNR3V6vdzSr7t394NGWB5uabCuhtN1LV', timelock=None))], parents=['76d8bb18b24173fe72898faf2a09606b09af78a22cb3a087cd88f2faed67c0f6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='d1fa99db1041855845d4bbf76603b124190aa19fd2e65e689faa4062497f4b7c', spent_outputs=[], conflict_with=[], voided_by=['d1fa99db1041855845d4bbf76603b124190aa19fd2e65e689faa4062497f4b7c'], received_by=[], children=['385811b93c2926cc9e599f2ea9f482af98def3e0b394d4e6cb6a151f4a5127ad'], twins=[], accumulated_weight=2.0, score=21.267581676524166, accumulated_weight_raw='4', score_raw='2524527', first_block=None, height=2, validation='full', nc_execution=None)), group_id=None), latest_event_id=48, stream_id=stream_id), # noqa: E501 + # a reorg happens, reexecuting nc1 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=26, timestamp=0, type=EventType.REORG_STARTED, data=ReorgData(reorg_size=1, previous_best_block='f3201793cb10fb5d3821773f169d33934a4127c5636128bf07c2e266a2ca3e5f', new_best_block='385811b93c2926cc9e599f2ea9f482af98def3e0b394d4e6cb6a151f4a5127ad', common_block='76d8bb18b24173fe72898faf2a09606b09af78a22cb3a087cd88f2faed67c0f6'), group_id=0), latest_event_id=48, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=27, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='385811b93c2926cc9e599f2ea9f482af98def3e0b394d4e6cb6a151f4a5127ad', nonce=0, timestamp=1572636471, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUuREy3ZBqH13akgzR38nfAl3QLryIrA==', 
decoded=DecodedTxOutput(type='P2PKH', address='HPPfzpTiAfyj6gsbHG7FQsYUbVXaSg9MQK', timelock=None))], parents=['d1fa99db1041855845d4bbf76603b124190aa19fd2e65e689faa4062497f4b7c', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='385811b93c2926cc9e599f2ea9f482af98def3e0b394d4e6cb6a151f4a5127ad', spent_outputs=[], conflict_with=[], voided_by=['385811b93c2926cc9e599f2ea9f482af98def3e0b394d4e6cb6a151f4a5127ad'], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=21.267583962408054, accumulated_weight_raw='4', score_raw='2524531', first_block=None, height=3, validation='full', nc_execution=None)), group_id=0), latest_event_id=48, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=28, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='d1fa99db1041855845d4bbf76603b124190aa19fd2e65e689faa4062497f4b7c', nonce=0, timestamp=1572636440, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUH3DCvvDLG1Vgxw9QKnqEFygPQbKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H9PNR3V6vdzSr7t394NGWB5uabCuhtN1LV', timelock=None))], parents=['76d8bb18b24173fe72898faf2a09606b09af78a22cb3a087cd88f2faed67c0f6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='d1fa99db1041855845d4bbf76603b124190aa19fd2e65e689faa4062497f4b7c', spent_outputs=[], conflict_with=[], voided_by=['d1fa99db1041855845d4bbf76603b124190aa19fd2e65e689faa4062497f4b7c'], received_by=[], children=['385811b93c2926cc9e599f2ea9f482af98def3e0b394d4e6cb6a151f4a5127ad'], twins=[], accumulated_weight=2.0, 
score=21.267581676524166, accumulated_weight_raw='4', score_raw='2524527', first_block=None, height=2, validation='full', nc_execution=None)), group_id=0), latest_event_id=48, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=29, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c', nonce=0, timestamp=1572636378, signal_bits=0, version=1, weight=17.90997909200292, inputs=[TxInput(tx_id='26c218df92c9d6410c9b9a7093a35cb046f959e12dc8727b7cc6671c78177dea', index=0, spent_output=TxOutput(value=1, token_data=0, script='dqkU80nn8+AtzngP5Dna6XSwXuuCC/eIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HUhX83RJxSmUoffvmTNsLa4kAf33Y7GtuA', timelock=None)))], outputs=[TxOutput(value=1, token_data=0, script='dqkUywjnDVLCo2DvKimDErqN97qgd+SIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HR2gDr1CEhH8oaBPdjgMddzcxiESgmPdPQ', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f3201793cb10fb5d3821773f169d33934a4127c5636128bf07c2e266a2ca3e5f', 'd1fa99db1041855845d4bbf76603b124190aa19fd2e65e689faa4062497f4b7c', '385811b93c2926cc9e599f2ea9f482af98def3e0b394d4e6cb6a151f4a5127ad', 'a67ca8b6c423b08d06f61e5aae8b58ae00e028b8ae914b9c7943f27f01749842', 'be9f0c15423b88ddb7b0a4ef4c86f252b4d4e179f4e9ae357775766fca7226c1'], twins=[], accumulated_weight=17.909980953144554, score=0.0, accumulated_weight_raw='246287', score_raw='0', first_block='f3201793cb10fb5d3821773f169d33934a4127c5636128bf07c2e266a2ca3e5f', height=0, validation='full', nc_execution='success')), group_id=0), 
latest_event_id=48, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=30, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='26c218df92c9d6410c9b9a7093a35cb046f959e12dc8727b7cc6671c78177dea', nonce=0, timestamp=1572636377, signal_bits=0, version=1, weight=21.119475351738224, inputs=[TxInput(tx_id='2ebb3b8edcb72a7e46cc0efacfe1b109e2e9dd868a90fe0906968dc8fbbf6488', index=0, spent_output=TxOutput(value=100000000000, token_data=0, script='dqkU0HvILW4NG7EWYUB2ZF6bh8jIO0GIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRXVDmLVdq8pgok1BCUKpiFWdAVAy4a5AJ', timelock=None)))], outputs=[TxOutput(value=1, token_data=0, script='dqkU80nn8+AtzngP5Dna6XSwXuuCC/eIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HUhX83RJxSmUoffvmTNsLa4kAf33Y7GtuA', timelock=None)), TxOutput(value=99999999999, token_data=0, script='dqkUf5BkCh0suqADvCnN6YjtldkKnueIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HJ9dB9tKwFs8MGvcgdSpZZbVXWxpFZ9tBs', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='26c218df92c9d6410c9b9a7093a35cb046f959e12dc8727b7cc6671c78177dea', spent_outputs=[SpentOutput(index=0, tx_ids=['94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c'])], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=21.11947563927789, score=0.0, accumulated_weight_raw='2278220', score_raw='0', first_block='f3201793cb10fb5d3821773f169d33934a4127c5636128bf07c2e266a2ca3e5f', height=0, validation='full', nc_execution=None)), group_id=0), latest_event_id=48, stream_id=stream_id), # noqa: E501 + # nc1 is executed for the second time + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=31, timestamp=0, 
type=EventType.NC_EVENT, data=NCEventData(vertex_id='94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c', nc_id='94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c', nc_execution='success', first_block='f3201793cb10fb5d3821773f169d33934a4127c5636128bf07c2e266a2ca3e5f', data_hex=b'test event on initialize 1'.hex()), group_id=0), latest_event_id=48, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=32, timestamp=0, type=EventType.REORG_FINISHED, data=EmptyData(), group_id=0), latest_event_id=48, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=33, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='385811b93c2926cc9e599f2ea9f482af98def3e0b394d4e6cb6a151f4a5127ad', nonce=0, timestamp=1572636471, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUuREy3ZBqH13akgzR38nfAl3QLryIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPPfzpTiAfyj6gsbHG7FQsYUbVXaSg9MQK', timelock=None))], parents=['d1fa99db1041855845d4bbf76603b124190aa19fd2e65e689faa4062497f4b7c', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='385811b93c2926cc9e599f2ea9f482af98def3e0b394d4e6cb6a151f4a5127ad', spent_outputs=[], conflict_with=[], voided_by=['385811b93c2926cc9e599f2ea9f482af98def3e0b394d4e6cb6a151f4a5127ad'], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=21.267583962408054, accumulated_weight_raw='4', score_raw='2524531', first_block=None, height=3, validation='full', nc_execution=None)), group_id=None), latest_event_id=48, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=34, 
timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='a67ca8b6c423b08d06f61e5aae8b58ae00e028b8ae914b9c7943f27f01749842', nonce=0, timestamp=1572636502, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUoFBWk4Dv5WGbe4jvvVKoLI/iAMeIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HM8nk2q74k3nLh4FnUWfCgyuT165bmHyvu', timelock=None))], parents=['f3201793cb10fb5d3821773f169d33934a4127c5636128bf07c2e266a2ca3e5f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='a67ca8b6c423b08d06f61e5aae8b58ae00e028b8ae914b9c7943f27f01749842', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['be9f0c15423b88ddb7b0a4ef4c86f252b4d4e179f4e9ae357775766fca7226c1'], twins=[], accumulated_weight=2.0, score=21.267583962408054, accumulated_weight_raw='4', score_raw='2524531', first_block=None, height=3, validation='full', nc_execution=None)), group_id=None), latest_event_id=48, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=35, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='385811b93c2926cc9e599f2ea9f482af98def3e0b394d4e6cb6a151f4a5127ad', nonce=0, timestamp=1572636471, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUuREy3ZBqH13akgzR38nfAl3QLryIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HPPfzpTiAfyj6gsbHG7FQsYUbVXaSg9MQK', timelock=None))], parents=['d1fa99db1041855845d4bbf76603b124190aa19fd2e65e689faa4062497f4b7c', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, 
metadata=TxMetadata(hash='385811b93c2926cc9e599f2ea9f482af98def3e0b394d4e6cb6a151f4a5127ad', spent_outputs=[], conflict_with=[], voided_by=['385811b93c2926cc9e599f2ea9f482af98def3e0b394d4e6cb6a151f4a5127ad'], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=21.267583962408054, accumulated_weight_raw='4', score_raw='2524531', first_block=None, height=3, validation='full', nc_execution=None)), group_id=None), latest_event_id=48, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=36, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='d1fa99db1041855845d4bbf76603b124190aa19fd2e65e689faa4062497f4b7c', nonce=0, timestamp=1572636440, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUH3DCvvDLG1Vgxw9QKnqEFygPQbKIrA==', decoded=DecodedTxOutput(type='P2PKH', address='H9PNR3V6vdzSr7t394NGWB5uabCuhtN1LV', timelock=None))], parents=['76d8bb18b24173fe72898faf2a09606b09af78a22cb3a087cd88f2faed67c0f6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='d1fa99db1041855845d4bbf76603b124190aa19fd2e65e689faa4062497f4b7c', spent_outputs=[], conflict_with=[], voided_by=['d1fa99db1041855845d4bbf76603b124190aa19fd2e65e689faa4062497f4b7c'], received_by=[], children=['385811b93c2926cc9e599f2ea9f482af98def3e0b394d4e6cb6a151f4a5127ad'], twins=[], accumulated_weight=2.0, score=21.267581676524166, accumulated_weight_raw='4', score_raw='2524527', first_block=None, height=2, validation='full', nc_execution=None)), group_id=None), latest_event_id=48, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=37, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, 
data=TxData(hash='94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c', nonce=0, timestamp=1572636378, signal_bits=0, version=1, weight=17.90997909200292, inputs=[TxInput(tx_id='26c218df92c9d6410c9b9a7093a35cb046f959e12dc8727b7cc6671c78177dea', index=0, spent_output=TxOutput(value=1, token_data=0, script='dqkU80nn8+AtzngP5Dna6XSwXuuCC/eIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HUhX83RJxSmUoffvmTNsLa4kAf33Y7GtuA', timelock=None)))], outputs=[TxOutput(value=1, token_data=0, script='dqkUywjnDVLCo2DvKimDErqN97qgd+SIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HR2gDr1CEhH8oaBPdjgMddzcxiESgmPdPQ', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f3201793cb10fb5d3821773f169d33934a4127c5636128bf07c2e266a2ca3e5f', 'd1fa99db1041855845d4bbf76603b124190aa19fd2e65e689faa4062497f4b7c', '385811b93c2926cc9e599f2ea9f482af98def3e0b394d4e6cb6a151f4a5127ad', 'a67ca8b6c423b08d06f61e5aae8b58ae00e028b8ae914b9c7943f27f01749842', 'be9f0c15423b88ddb7b0a4ef4c86f252b4d4e179f4e9ae357775766fca7226c1'], twins=[], accumulated_weight=17.909980953144554, score=0.0, accumulated_weight_raw='246287', score_raw='0', first_block='f3201793cb10fb5d3821773f169d33934a4127c5636128bf07c2e266a2ca3e5f', height=0, validation='full', nc_execution='success')), group_id=None), latest_event_id=48, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=38, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='26c218df92c9d6410c9b9a7093a35cb046f959e12dc8727b7cc6671c78177dea', nonce=0, timestamp=1572636377, signal_bits=0, version=1, 
weight=21.119475351738224, inputs=[TxInput(tx_id='2ebb3b8edcb72a7e46cc0efacfe1b109e2e9dd868a90fe0906968dc8fbbf6488', index=0, spent_output=TxOutput(value=100000000000, token_data=0, script='dqkU0HvILW4NG7EWYUB2ZF6bh8jIO0GIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRXVDmLVdq8pgok1BCUKpiFWdAVAy4a5AJ', timelock=None)))], outputs=[TxOutput(value=1, token_data=0, script='dqkU80nn8+AtzngP5Dna6XSwXuuCC/eIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HUhX83RJxSmUoffvmTNsLa4kAf33Y7GtuA', timelock=None)), TxOutput(value=99999999999, token_data=0, script='dqkUf5BkCh0suqADvCnN6YjtldkKnueIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HJ9dB9tKwFs8MGvcgdSpZZbVXWxpFZ9tBs', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='26c218df92c9d6410c9b9a7093a35cb046f959e12dc8727b7cc6671c78177dea', spent_outputs=[SpentOutput(index=0, tx_ids=['94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c'])], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=21.11947563927789, score=0.0, accumulated_weight_raw='2278220', score_raw='0', first_block='f3201793cb10fb5d3821773f169d33934a4127c5636128bf07c2e266a2ca3e5f', height=0, validation='full', nc_execution=None)), group_id=None), latest_event_id=48, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=39, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='a67ca8b6c423b08d06f61e5aae8b58ae00e028b8ae914b9c7943f27f01749842', nonce=0, timestamp=1572636502, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUoFBWk4Dv5WGbe4jvvVKoLI/iAMeIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HM8nk2q74k3nLh4FnUWfCgyuT165bmHyvu', timelock=None))], 
parents=['f3201793cb10fb5d3821773f169d33934a4127c5636128bf07c2e266a2ca3e5f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='a67ca8b6c423b08d06f61e5aae8b58ae00e028b8ae914b9c7943f27f01749842', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['be9f0c15423b88ddb7b0a4ef4c86f252b4d4e179f4e9ae357775766fca7226c1'], twins=[], accumulated_weight=2.0, score=21.267583962408054, accumulated_weight_raw='4', score_raw='2524531', first_block=None, height=3, validation='full', nc_execution=None)), group_id=None), latest_event_id=48, stream_id=stream_id), # noqa: E501 + # another reorg happens, reexecuting nc1 again + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=40, timestamp=0, type=EventType.REORG_STARTED, data=ReorgData(reorg_size=2, previous_best_block='385811b93c2926cc9e599f2ea9f482af98def3e0b394d4e6cb6a151f4a5127ad', new_best_block='be9f0c15423b88ddb7b0a4ef4c86f252b4d4e179f4e9ae357775766fca7226c1', common_block='76d8bb18b24173fe72898faf2a09606b09af78a22cb3a087cd88f2faed67c0f6'), group_id=1), latest_event_id=48, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=41, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='be9f0c15423b88ddb7b0a4ef4c86f252b4d4e179f4e9ae357775766fca7226c1', nonce=0, timestamp=1572636533, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUp+HdqawCdjC61o+vrDbEpBOVOmuIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HMpoopGYaybBqTotndHfdYxcZBgaqBZbuJ', timelock=None))], parents=['a67ca8b6c423b08d06f61e5aae8b58ae00e028b8ae914b9c7943f27f01749842', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', 
'94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='be9f0c15423b88ddb7b0a4ef4c86f252b4d4e179f4e9ae357775766fca7226c1', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=21.267586248288318, accumulated_weight_raw='4', score_raw='2524535', first_block=None, height=4, validation='full', nc_execution=None)), group_id=1), latest_event_id=48, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=42, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='a67ca8b6c423b08d06f61e5aae8b58ae00e028b8ae914b9c7943f27f01749842', nonce=0, timestamp=1572636502, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUoFBWk4Dv5WGbe4jvvVKoLI/iAMeIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HM8nk2q74k3nLh4FnUWfCgyuT165bmHyvu', timelock=None))], parents=['f3201793cb10fb5d3821773f169d33934a4127c5636128bf07c2e266a2ca3e5f', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='a67ca8b6c423b08d06f61e5aae8b58ae00e028b8ae914b9c7943f27f01749842', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['be9f0c15423b88ddb7b0a4ef4c86f252b4d4e179f4e9ae357775766fca7226c1'], twins=[], accumulated_weight=2.0, score=21.267583962408054, accumulated_weight_raw='4', score_raw='2524531', first_block=None, height=3, validation='full', nc_execution=None)), group_id=1), latest_event_id=48, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=43, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, 
data=TxData(hash='f3201793cb10fb5d3821773f169d33934a4127c5636128bf07c2e266a2ca3e5f', nonce=0, timestamp=1572636409, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUSs2wh6dehIfZfwWtUCkvVL6mm8eIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HDLeo6r8wB8schqaMxrLP1HrrpBVZeRfWY', timelock=None))], parents=['76d8bb18b24173fe72898faf2a09606b09af78a22cb3a087cd88f2faed67c0f6', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='f3201793cb10fb5d3821773f169d33934a4127c5636128bf07c2e266a2ca3e5f', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['a67ca8b6c423b08d06f61e5aae8b58ae00e028b8ae914b9c7943f27f01749842'], twins=[], accumulated_weight=2.0, score=21.267581676524166, accumulated_weight_raw='4', score_raw='2524527', first_block=None, height=2, validation='full', nc_execution=None)), group_id=1), latest_event_id=48, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=44, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c', nonce=0, timestamp=1572636378, signal_bits=0, version=1, weight=17.90997909200292, inputs=[TxInput(tx_id='26c218df92c9d6410c9b9a7093a35cb046f959e12dc8727b7cc6671c78177dea', index=0, spent_output=TxOutput(value=1, token_data=0, script='dqkU80nn8+AtzngP5Dna6XSwXuuCC/eIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HUhX83RJxSmUoffvmTNsLa4kAf33Y7GtuA', timelock=None)))], outputs=[TxOutput(value=1, token_data=0, script='dqkUywjnDVLCo2DvKimDErqN97qgd+SIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HR2gDr1CEhH8oaBPdjgMddzcxiESgmPdPQ', timelock=None))], parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', 
'33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=['f3201793cb10fb5d3821773f169d33934a4127c5636128bf07c2e266a2ca3e5f', 'd1fa99db1041855845d4bbf76603b124190aa19fd2e65e689faa4062497f4b7c', '385811b93c2926cc9e599f2ea9f482af98def3e0b394d4e6cb6a151f4a5127ad', 'a67ca8b6c423b08d06f61e5aae8b58ae00e028b8ae914b9c7943f27f01749842', 'be9f0c15423b88ddb7b0a4ef4c86f252b4d4e179f4e9ae357775766fca7226c1'], twins=[], accumulated_weight=17.909980953144554, score=0.0, accumulated_weight_raw='246287', score_raw='0', first_block='f3201793cb10fb5d3821773f169d33934a4127c5636128bf07c2e266a2ca3e5f', height=0, validation='full', nc_execution='success')), group_id=1), latest_event_id=48, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=45, timestamp=0, type=EventType.VERTEX_METADATA_CHANGED, data=TxData(hash='26c218df92c9d6410c9b9a7093a35cb046f959e12dc8727b7cc6671c78177dea', nonce=0, timestamp=1572636377, signal_bits=0, version=1, weight=21.119475351738224, inputs=[TxInput(tx_id='2ebb3b8edcb72a7e46cc0efacfe1b109e2e9dd868a90fe0906968dc8fbbf6488', index=0, spent_output=TxOutput(value=100000000000, token_data=0, script='dqkU0HvILW4NG7EWYUB2ZF6bh8jIO0GIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HRXVDmLVdq8pgok1BCUKpiFWdAVAy4a5AJ', timelock=None)))], outputs=[TxOutput(value=1, token_data=0, script='dqkU80nn8+AtzngP5Dna6XSwXuuCC/eIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HUhX83RJxSmUoffvmTNsLa4kAf33Y7GtuA', timelock=None)), TxOutput(value=99999999999, token_data=0, script='dqkUf5BkCh0suqADvCnN6YjtldkKnueIrA==', decoded=DecodedTxOutput(type='P2PKH', address='HJ9dB9tKwFs8MGvcgdSpZZbVXWxpFZ9tBs', timelock=None))], 
parents=['16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='26c218df92c9d6410c9b9a7093a35cb046f959e12dc8727b7cc6671c78177dea', spent_outputs=[SpentOutput(index=0, tx_ids=['94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c'])], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=21.11947563927789, score=0.0, accumulated_weight_raw='2278220', score_raw='0', first_block='f3201793cb10fb5d3821773f169d33934a4127c5636128bf07c2e266a2ca3e5f', height=0, validation='full', nc_execution=None)), group_id=1), latest_event_id=48, stream_id=stream_id), # noqa: E501 + # nc1 is reexecuted for the third and final time + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=46, timestamp=0, type=EventType.NC_EVENT, data=NCEventData(vertex_id='94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c', nc_id='94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c', nc_execution='success', first_block='f3201793cb10fb5d3821773f169d33934a4127c5636128bf07c2e266a2ca3e5f', data_hex=b'test event on initialize 1'.hex()), group_id=1), latest_event_id=48, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=47, timestamp=0, type=EventType.REORG_FINISHED, data=EmptyData(), group_id=1), latest_event_id=48, stream_id=stream_id), # noqa: E501 + EventResponse(type='EVENT', peer_id=self.peer_id, network='unittests', event=BaseEvent(id=48, timestamp=0, type=EventType.NEW_VERTEX_ACCEPTED, data=TxData(hash='be9f0c15423b88ddb7b0a4ef4c86f252b4d4e179f4e9ae357775766fca7226c1', nonce=0, timestamp=1572636533, signal_bits=0, version=0, weight=2.0, inputs=[], outputs=[TxOutput(value=6400, token_data=0, script='dqkUp+HdqawCdjC61o+vrDbEpBOVOmuIrA==', 
decoded=DecodedTxOutput(type='P2PKH', address='HMpoopGYaybBqTotndHfdYxcZBgaqBZbuJ', timelock=None))], parents=['a67ca8b6c423b08d06f61e5aae8b58ae00e028b8ae914b9c7943f27f01749842', '16ba3dbe424c443e571b00840ca54b9ff4cff467e10b6a15536e718e2008f952', '94b6278e7bac5b5ebb0888a0eb742868111b3a04b004cfbaf3b1786504815c8c'], tokens=[], token_name=None, token_symbol=None, aux_pow=None, metadata=TxMetadata(hash='be9f0c15423b88ddb7b0a4ef4c86f252b4d4e179f4e9ae357775766fca7226c1', spent_outputs=[], conflict_with=[], voided_by=[], received_by=[], children=[], twins=[], accumulated_weight=2.0, score=21.267586248288318, accumulated_weight_raw='4', score_raw='2524535', first_block=None, height=4, validation='full', nc_execution=None)), group_id=None), latest_event_id=48, stream_id=stream_id) # noqa: E501 ] self.assert_response_equal(responses, expected) @@ -495,16 +645,8 @@ def _start_stream(self) -> None: def _remove_timestamp(responses: list[EventResponse]) -> list[EventResponse]: for response in responses: - # We remove the timestamp from the comparison as it's not important and can be affected by other parts of - # the code. + # We remove the event timestamp from the comparison as it's not important and + # can be affected by other parts of the code. 
del response.event.timestamp return responses - - -class MemoryEventSimulationScenariosTest(BaseEventSimulationScenariosTest, MemoryEventSimulationTester): - __test__ = True - - -class RocksDBEventSimulationScenariosTest(BaseEventSimulationScenariosTest, RocksDBEventSimulationTester): - __test__ = True diff --git a/tests/event/test_event_storage.py b/tests/event/test_event_storage.py index b0b368d2f..758602129 100644 --- a/tests/event/test_event_storage.py +++ b/tests/event/test_event_storage.py @@ -1,23 +1,17 @@ -import tempfile - from hathor.event.model.base_event import BaseEvent from hathor.event.model.node_state import NodeState -from hathor.event.storage import EventStorage -from hathor.event.storage.memory_storage import EventMemoryStorage from hathor.event.storage.rocksdb_storage import EventRocksDBStorage -from hathor.storage.rocksdb_storage import RocksDBStorage from tests import unittest from tests.utils import EventMocker -class EventStorageBaseTest(unittest.TestCase): - __test__ = False - - event_storage: EventStorage - +class EventStorageTest(unittest.TestCase): def setUp(self) -> None: super().setUp() self.event_mocker = EventMocker(self.rng) + self.event_storage = EventRocksDBStorage( + rocksdb_storage=self.create_rocksdb_storage(), + ) def test_save_event_and_retrieve(self) -> None: event = self.event_mocker.generate_mocked_event() @@ -233,22 +227,3 @@ def test_reset_all_full_database(self) -> None: assert node_state is None assert event_queue_state is False - - -class EventStorageRocksDBTest(EventStorageBaseTest): - __test__ = True - - def setUp(self) -> None: - super().setUp() - self.directory = tempfile.mkdtemp() - self.tmpdirs.append(self.directory) - self.rocksdb_storage = RocksDBStorage(path=self.directory) - self.event_storage = EventRocksDBStorage(self.rocksdb_storage) - - -class EventStorageMemoryTest(EventStorageBaseTest): - __test__ = True - - def setUp(self) -> None: - super().setUp() - self.event_storage = EventMemoryStorage() diff 
--git a/tests/event/websocket/test_factory.py b/tests/event/websocket/test_factory.py index 24feeab98..3fbbd6711 100644 --- a/tests/event/websocket/test_factory.py +++ b/tests/event/websocket/test_factory.py @@ -17,11 +17,12 @@ import pytest from hathor.conf.get_settings import get_global_settings -from hathor.event.storage import EventMemoryStorage +from hathor.event.storage import EventRocksDBStorage from hathor.event.websocket.factory import EventWebsocketFactory from hathor.event.websocket.protocol import EventWebsocketProtocol from hathor.event.websocket.response import EventResponse, InvalidRequestType from hathor.simulator.clock import MemoryReactorHeapClock +from hathor.storage import RocksDBStorage from tests.utils import EventMocker @@ -157,7 +158,9 @@ def _get_factory( n_starting_events: int = 0, clock: MemoryReactorHeapClock = MemoryReactorHeapClock() ) -> EventWebsocketFactory: - event_storage = EventMemoryStorage() + event_storage = EventRocksDBStorage( + rocksdb_storage=RocksDBStorage.create_temp(), + ) for event_id in range(n_starting_events): event = EventMocker.create_event(event_id) diff --git a/tests/event/websocket/test_protocol.py b/tests/event/websocket/test_protocol.py index a13778876..2d64f42f8 100644 --- a/tests/event/websocket/test_protocol.py +++ b/tests/event/websocket/test_protocol.py @@ -101,11 +101,12 @@ def test_send_event_response() -> None: b'"timestamp":123.0,"type":"VERTEX_METADATA_CHANGED","data":{"hash":"abc","nonce":123,' b'"timestamp":456,"signal_bits":0,"version":1,"weight":10.0,"inputs":[],"outputs":[],' b'"parents":[],' - b'"tokens":[],"token_name":null,"token_symbol":null,"aux_pow":null,"metadata":{"hash":"abc",' + b'"tokens":[],"token_name":null,"token_symbol":null,"aux_pow":null,"headers":[],' + b'"metadata":{"hash":"abc",' b'"spent_outputs":[],"conflict_with":[],"voided_by":[],"received_by":[],"children":[],' b'"twins":[],"accumulated_weight":10.0,"score":20.0,"accumulated_weight_raw":"1024",' 
b'"score_raw":"1048576","first_block":null,"height":100,' - b'"validation":"validation"}},"group_id":null},"latest_event_id":10,' + b'"validation":"validation","nc_execution":null}},"group_id":null},"latest_event_id":10,' b'"stream_id":"stream_id"}') protocol.sendMessage.assert_called_once_with(expected_payload) diff --git a/tests/feature_activation/test_feature_service.py b/tests/feature_activation/test_feature_service.py index f042b4e45..ce0efd871 100644 --- a/tests/feature_activation/test_feature_service.py +++ b/tests/feature_activation/test_feature_service.py @@ -29,16 +29,17 @@ from hathor.feature_activation.model.feature_info import FeatureInfo from hathor.feature_activation.model.feature_state import FeatureState from hathor.feature_activation.settings import Settings as FeatureSettings -from hathor.indexes import MemoryIndexesManager from hathor.transaction import Block -from hathor.transaction.storage import TransactionMemoryStorage, TransactionStorage +from hathor.transaction.storage import TransactionStorage from hathor.transaction.validation_state import ValidationState from hathor.util import not_none +from tests.unittest import TestBuilder def get_storage(settings: HathorSettings, *, up_to_height: int) -> TransactionStorage: - indexes = MemoryIndexesManager() - storage = TransactionMemoryStorage(indexes=indexes, settings=settings) + artifacts = TestBuilder(settings).build() + storage = artifacts.tx_storage + indexes = not_none(artifacts.indexes) feature_activation_bits = [ 0b0000, # 0: boundary block 0b0010, diff --git a/tests/feature_activation/test_feature_simulation.py b/tests/feature_activation/test_feature_simulation.py index cfa97f822..17899666f 100644 --- a/tests/feature_activation/test_feature_simulation.py +++ b/tests/feature_activation/test_feature_simulation.py @@ -19,6 +19,7 @@ from hathor.builder import Builder from hathor.conf.get_settings import get_global_settings +from hathor.exception import InvalidNewTransaction from 
hathor.feature_activation.feature import Feature from hathor.feature_activation.feature_service import FeatureService from hathor.feature_activation.model.criteria import Criteria @@ -108,7 +109,7 @@ def test_feature(self) -> None: tx = gen_new_tx(manager, address, 6400*10) tx.weight = 25 tx.update_hash() - assert manager.propagate_tx(tx, fails_silently=False) + assert manager.propagate_tx(tx) result = self._get_result(web_client) assert result == dict( block_height=10, @@ -146,7 +147,7 @@ def test_feature(self) -> None: tx = gen_new_tx(manager, address, 6400*19) tx.weight = 25 tx.update_hash() - assert manager.propagate_tx(tx, fails_silently=False) + assert manager.propagate_tx(tx) result = self._get_result(web_client) assert result == dict( block_height=19, @@ -183,7 +184,7 @@ def test_feature(self) -> None: tx = gen_new_tx(manager, address, 6400*20) tx.weight = 25 tx.update_hash() - assert manager.propagate_tx(tx, fails_silently=False) + assert manager.propagate_tx(tx) result = self._get_result(web_client) assert result == dict( block_height=20, @@ -222,7 +223,7 @@ def test_feature(self) -> None: tx = gen_new_tx(manager, address, 6400*55) tx.weight = 30 tx.update_hash() - assert manager.propagate_tx(tx, fails_silently=False) + assert manager.propagate_tx(tx) result = self._get_result(web_client) assert result == dict( block_height=55, @@ -258,7 +259,7 @@ def test_feature(self) -> None: tx = gen_new_tx(manager, address, 6400*56) tx.weight = 30 tx.update_hash() - assert manager.propagate_tx(tx, fails_silently=False) + assert manager.propagate_tx(tx) result = self._get_result(web_client) assert result == dict( block_height=56, @@ -298,9 +299,10 @@ def test_feature(self) -> None: non_signaling_block.init_static_metadata_from_storage(settings, manager.tx_storage) with pytest.raises(BlockMustSignalError): - manager.verification_service.verify(non_signaling_block) + manager.verification_service.verify(non_signaling_block, self.verification_params) - assert not 
manager.propagate_tx(non_signaling_block) + with pytest.raises(InvalidNewTransaction): + manager.propagate_tx(non_signaling_block) # at block 59, the feature is MUST_SIGNAL, just before becoming LOCKED_IN: [*_, last_block] = add_new_blocks(manager, num_blocks=2, signal_bits=0b1) @@ -308,7 +310,7 @@ def test_feature(self) -> None: tx = gen_new_tx(manager, address, 6400*59) tx.weight = 30 tx.update_hash() - assert manager.propagate_tx(tx, fails_silently=False) + assert manager.propagate_tx(tx) result = self._get_result(web_client) assert result == dict( block_height=59, @@ -345,7 +347,7 @@ def test_feature(self) -> None: tx = gen_new_tx(manager, address, 6400*60) tx.weight = 30 tx.update_hash() - assert manager.propagate_tx(tx, fails_silently=False) + assert manager.propagate_tx(tx) result = self._get_result(web_client) assert result == dict( block_height=60, @@ -384,7 +386,7 @@ def test_feature(self) -> None: tx = gen_new_tx(manager, address, 6400*71) tx.weight = 30 tx.update_hash() - assert manager.propagate_tx(tx, fails_silently=False) + assert manager.propagate_tx(tx) result = self._get_result(web_client) assert result == dict( block_height=71, @@ -420,7 +422,7 @@ def test_feature(self) -> None: tx = gen_new_tx(manager, address, 6400*72) tx.weight = 30 tx.update_hash() - assert manager.propagate_tx(tx, fails_silently=False) + assert manager.propagate_tx(tx) result = self._get_result(web_client) assert result == dict( block_height=72, @@ -663,13 +665,6 @@ def test_reorg(self) -> None: assert artifacts.bit_signaling_service.get_not_support_features() == [Feature.NOP_FEATURE_1] -class MemoryStorageFeatureSimulationTest(BaseFeatureSimulationTest): - __test__ = True - - def get_simulator_builder(self) -> Builder: - return self.simulator.get_default_builder() - - class RocksDBStorageFeatureSimulationTest(BaseFeatureSimulationTest): __test__ = True @@ -681,8 +676,7 @@ def get_rocksdb_directory(self) -> str: def get_simulator_builder_from_dir(self, rocksdb_directory: 
str) -> Builder: return self.simulator.get_default_builder() \ - .use_rocksdb(path=rocksdb_directory) \ - .disable_full_verification() + .set_rocksdb_path(path=rocksdb_directory) def get_simulator_builder(self) -> Builder: rocksdb_directory = self.get_rocksdb_directory() diff --git a/tests/nanocontracts/__init__.py b/tests/nanocontracts/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/nanocontracts/blueprints/__init__.py b/tests/nanocontracts/blueprints/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/nanocontracts/blueprints/test_bet.py b/tests/nanocontracts/blueprints/test_bet.py new file mode 100644 index 000000000..b6bb7d2d3 --- /dev/null +++ b/tests/nanocontracts/blueprints/test_bet.py @@ -0,0 +1,220 @@ +import inspect +import os +import re +from typing import NamedTuple, Optional + +from hathor.conf import HathorSettings +from hathor.crypto.util import decode_address +from hathor.nanocontracts.context import Context +from hathor.nanocontracts.nc_types import NCType, make_nc_type_for_arg_type as make_nc_type +from hathor.nanocontracts.types import ( + Address, + Amount, + ContractId, + NCDepositAction, + NCWithdrawalAction, + SignedData, + Timestamp, + TokenUid, + TxOutputScript, + VertexId, +) +from hathor.transaction import BaseTransaction +from hathor.transaction.scripts import P2PKH +from hathor.util import not_none +from hathor.wallet import KeyPair +from tests.nanocontracts.blueprints.unittest import BlueprintTestCase +from tests.nanocontracts.test_blueprints import bet + +settings = HathorSettings() + +TX_OUTPUT_SCRIPT_NC_TYPE = make_nc_type(TxOutputScript) +RESULT_NC_TYPE: NCType[str | None] = make_nc_type(str | None) # type: ignore[arg-type] +TIMESTAMP_NC_TYPE = make_nc_type(Timestamp) +TOKEN_UID_NC_TYPE = make_nc_type(TokenUid) + + +class BetInfo(NamedTuple): + key: KeyPair + address: Address + amount: Amount + score: str + + +class NCBetBlueprintTestCase(BlueprintTestCase): + def 
setUp(self): + super().setUp() + self.blueprint_id = self.register_blueprint_file(inspect.getfile(bet)) + self.token_uid = TokenUid(settings.HATHOR_TOKEN_UID) + self.nc_id = ContractId(VertexId(b'1' * 32)) + self.initialize_contract() + self.nc_storage = self.runner.get_storage(self.nc_id) + + def _get_any_tx(self) -> BaseTransaction: + genesis = self.manager.tx_storage.get_all_genesis() + tx = [t for t in genesis if t.is_transaction][0] + return tx + + def _get_any_address(self) -> tuple[Address, KeyPair]: + password = os.urandom(12) + key = KeyPair.create(password) + address_b58 = key.address + address_bytes = Address(decode_address(not_none(address_b58))) + return address_bytes, key + + def get_current_timestamp(self) -> int: + return int(self.clock.seconds()) + + def _make_a_bet(self, amount: int, score: str, *, timestamp: Optional[int] = None) -> BetInfo: + (address_bytes, key) = self._get_any_address() + tx = self._get_any_tx() + action = NCDepositAction(token_uid=self.token_uid, amount=amount) + if timestamp is None: + timestamp = self.get_current_timestamp() + context = Context([action], tx, address_bytes, timestamp=timestamp) + self.runner.call_public_method(self.nc_id, 'bet', context, address_bytes, score) + return BetInfo(key=key, address=Address(address_bytes), amount=Amount(amount), score=score) + + def _set_result(self, result: str, oracle_key: Optional[KeyPair] = None) -> None: + signed_result = SignedData[str](result, b'') + + if oracle_key is None: + oracle_key = self.oracle_key + + result_bytes = signed_result.get_data_bytes(self.nc_id) + signed_result.script_input = oracle_key.p2pkh_create_input_data(b'123', result_bytes) + + tx = self._get_any_tx() + context = Context([], tx, Address(b''), timestamp=self.get_current_timestamp()) + self.runner.call_public_method(self.nc_id, 'set_result', context, signed_result) + final_result = self.nc_storage.get_obj(b'final_result', RESULT_NC_TYPE) + self.assertEqual(final_result, '2x2') + + def _withdraw(self, 
address: Address, amount: int) -> None: + tx = self._get_any_tx() + action = NCWithdrawalAction(token_uid=self.token_uid, amount=amount) + context = Context([action], tx, address, timestamp=self.get_current_timestamp()) + self.runner.call_public_method(self.nc_id, 'withdraw', context) + + def initialize_contract(self) -> None: + self.oracle_key = KeyPair.create(b'123') + assert self.oracle_key.address is not None + self.oracle_script = P2PKH(self.oracle_key.address).get_script() + self.date_last_bet = self.get_current_timestamp() + 3600 * 24 + self.runner.create_contract( + self.nc_id, + self.blueprint_id, + Context([], self._get_any_tx(), Address(b''), timestamp=self.get_current_timestamp()), + self.oracle_script, + self.token_uid, + self.date_last_bet, + ) + + def test_blueprint_initialization(self) -> None: + # if initialization was correct we should be able to observe these in the nc_storage: + self.assertEqual(self.nc_storage.get_obj(b'oracle_script', TX_OUTPUT_SCRIPT_NC_TYPE), self.oracle_script) + self.assertEqual(self.nc_storage.get_obj(b'token_uid', TOKEN_UID_NC_TYPE), self.token_uid) + self.assertEqual(self.nc_storage.get_obj(b'date_last_bet', TIMESTAMP_NC_TYPE), self.date_last_bet) + + def test_basic_flow(self) -> None: + runner = self.runner + + tx = self._get_any_tx() + + ### + # Make some bets. + ### + self._make_a_bet(100, '1x1') + self._make_a_bet(200, '1x1') + self._make_a_bet(300, '1x1') + bet1 = self._make_a_bet(500, '2x2') + + ### + # Set the final result. + ### + self._set_result('2x2') + + ### + # Single winner withdraws all funds. + ### + self.assertEqual(1100, runner.call_view_method(self.nc_id, 'get_max_withdrawal', bet1.address)) + + self._withdraw(bet1.address, 100) + self.assertEqual(1000, runner.call_view_method(self.nc_id, 'get_max_withdrawal', bet1.address)) + + self._withdraw(bet1.address, 1000) + self.assertEqual(0, runner.call_view_method(self.nc_id, 'get_max_withdrawal', bet1.address)) + + # Out of funds! 
Any withdrawal must fail from now on... + amount = 1 + action = NCWithdrawalAction(token_uid=self.token_uid, amount=amount) + context = Context([action], tx, bet1.address, timestamp=self.get_current_timestamp()) + with self.assertNCFail('InsufficientBalance', 'withdrawal amount is greater than available (max: 0)'): + runner.call_public_method(self.nc_id, 'withdraw', context) + + def test_make_a_bet_with_withdrawal(self) -> None: + self._make_a_bet(100, '1x1') + + (address_bytes, _) = self._get_any_address() + tx = self._get_any_tx() + action = NCWithdrawalAction(token_uid=self.token_uid, amount=1) + context = Context([action], tx, address_bytes, timestamp=self.get_current_timestamp()) + score = '1x1' + with self.assertNCFail('NCForbiddenAction', 'action WITHDRAWAL is forbidden on method `bet`'): + self.runner.call_public_method(self.nc_id, 'bet', context, address_bytes, score) + + def test_make_a_bet_after_result(self) -> None: + self._make_a_bet(100, '1x1') + self._set_result('2x2') + with self.assertNCFail('ResultAlreadySet', ''): + self._make_a_bet(100, '1x1') + + def test_make_a_bet_after_date_last_bet(self) -> None: + with self.assertNCFail('TooLate', re.compile(r'cannot place bets after \d+')): + self._make_a_bet(100, '1x1', timestamp=self.date_last_bet + 1) + + def test_set_results_two_times(self) -> None: + self._set_result('2x2') + with self.assertNCFail('ResultAlreadySet', ''): + self._set_result('5x1') + + def test_set_results_wrong_signature(self) -> None: + wrong_oracle_key = KeyPair.create(b'123') + with self.assertNCFail('InvalidOracleSignature', ''): + self._set_result('3x2', oracle_key=wrong_oracle_key) + + def test_withdraw_before_result(self) -> None: + bet1 = self._make_a_bet(100, '1x1') + with self.assertNCFail('ResultNotAvailable', ''): + self._withdraw(bet1.address, 100) + + def test_withdraw_with_deposits(self) -> None: + (address_bytes, _) = self._get_any_address() + tx = self._get_any_tx() + action = 
NCDepositAction(token_uid=self.token_uid, amount=1) + context = Context([action], tx, address_bytes, timestamp=self.get_current_timestamp()) + with self.assertNCFail('NCForbiddenAction', 'action DEPOSIT is forbidden on method `withdraw`'): + self.runner.call_public_method(self.nc_id, 'withdraw', context) + + def test_make_a_bet_wrong_token(self) -> None: + + (address_bytes, _) = self._get_any_address() + tx = self._get_any_tx() + token_uid = TokenUid(b'xxx') + self.assertNotEqual(token_uid, self.token_uid) + action = NCDepositAction(token_uid=token_uid, amount=1) + context = Context([action], tx, address_bytes, timestamp=self.get_current_timestamp()) + score = '1x1' + with self.assertNCFail('InvalidToken', 'token different from 00'): + self.runner.call_public_method(self.nc_id, 'bet', context, address_bytes, score) + + def test_withdraw_wrong_token(self) -> None: + bet1 = self._make_a_bet(100, '1x1') + + tx = self._get_any_tx() + token_uid = TokenUid(b'xxx') + self.assertNotEqual(token_uid, self.token_uid) + action = NCWithdrawalAction(token_uid=token_uid, amount=1) + context = Context([action], tx, bet1.address, timestamp=self.get_current_timestamp()) + with self.assertNCFail('InvalidToken', 'token different from 00'): + self.runner.call_public_method(self.nc_id, 'withdraw', context) diff --git a/tests/nanocontracts/blueprints/test_swap_demo.py b/tests/nanocontracts/blueprints/test_swap_demo.py new file mode 100644 index 000000000..059282990 --- /dev/null +++ b/tests/nanocontracts/blueprints/test_swap_demo.py @@ -0,0 +1,116 @@ +from hathor.nanocontracts.context import Context +from hathor.nanocontracts.nc_types import make_nc_type_for_arg_type as make_nc_type +from hathor.nanocontracts.storage.contract_storage import Balance +from hathor.nanocontracts.types import NCDepositAction, NCWithdrawalAction, TokenUid +from tests.nanocontracts.blueprints.unittest import BlueprintTestCase +from tests.nanocontracts.test_blueprints.swap_demo import InvalidActions, 
InvalidRatio, InvalidTokens, SwapDemo + +SWAP_NC_TYPE = make_nc_type(int) + + +class SwapDemoTestCase(BlueprintTestCase): + def setUp(self): + super().setUp() + + self.blueprint_id = self.gen_random_blueprint_id() + self.contract_id = self.gen_random_contract_id() + + self.nc_catalog.blueprints[self.blueprint_id] = SwapDemo + + # Test doubles: + self.token_a = self.gen_random_token_uid() + self.token_b = self.gen_random_token_uid() + self.token_c = self.gen_random_token_uid() + self.address = self.gen_random_address() + self.tx = self.get_genesis_tx() + + def _initialize( + self, + init_token_a: tuple[TokenUid, int, int], + init_token_b: tuple[TokenUid, int, int] + ) -> None: + # Arrange: + token_a, multiplier_a, amount_a = init_token_a + token_b, multiplier_b, amount_b = init_token_b + deposit_a = NCDepositAction(token_uid=token_a, amount=amount_a) + deposit_b = NCDepositAction(token_uid=token_b, amount=amount_b) + context = Context( + actions=[deposit_a, deposit_b], + vertex=self.tx, + address=self.address, + timestamp=self.now + ) + + # Act: + self.runner.create_contract( + self.contract_id, + self.blueprint_id, + context, + token_a, + token_b, + multiplier_a, + multiplier_b, + ) + self.nc_storage = self.runner.get_storage(self.contract_id) + + def _swap( + self, + amount_a: tuple[int, TokenUid], + amount_b: tuple[int, TokenUid] + ) -> None: + # Arrange: + value_a, token_a = amount_a + value_b, token_b = amount_b + action_a_type = self.get_action_type(value_a) + action_b_type = self.get_action_type(value_b) + swap_a = action_a_type(token_uid=token_a, amount=abs(value_a)) + swap_b = action_b_type(token_uid=token_b, amount=abs(value_b)) + context = Context( + actions=[swap_a, swap_b], + vertex=self.tx, + address=self.address, + timestamp=self.now + ) + + # Act: + self.runner.call_public_method(self.contract_id, 'swap', context) + + def test_lifecycle(self) -> None: + # Create a contract. 
+ # Arrange and act within: + self._initialize((self.token_a, 1, 100_00), (self.token_b, 1, 100_00)) + + # Assert: + self.assertEqual( + Balance(value=100_00, can_mint=False, can_melt=False), self.nc_storage.get_balance(self.token_a) + ) + self.assertEqual( + Balance(value=100_00, can_mint=False, can_melt=False), self.nc_storage.get_balance(self.token_b) + ) + self.assertEqual(0, self.nc_storage.get_obj(b'swaps_counter', SWAP_NC_TYPE)) + + # Make a valid swap. + # Arrange and act within: + self._swap((20_00, self.token_a), (-20_00, self.token_b)) + # Assert: + self.assertEqual( + Balance(value=120_00, can_mint=False, can_melt=False), self.nc_storage.get_balance(self.token_a) + ) + self.assertEqual( + Balance(value=80_00, can_mint=False, can_melt=False), self.nc_storage.get_balance(self.token_b) + ) + self.assertEqual(1, self.nc_storage.get_obj(b'swaps_counter', SWAP_NC_TYPE)) + + # Make multiple invalid swaps raising all possible exceptions. + with self.assertRaises(InvalidTokens): + self._swap((-20_00, self.token_a), (20_00, self.token_c)) + with self.assertRaises(InvalidActions): + self._swap((20_00, self.token_a), (40_00, self.token_b)) + with self.assertRaises(InvalidRatio): + self._swap((20_00, self.token_a), (-40_00, self.token_b)) + + def get_action_type(self, amount: int) -> type[NCDepositAction] | type[NCWithdrawalAction]: + if amount >= 0: + return NCDepositAction + else: + return NCWithdrawalAction diff --git a/tests/nanocontracts/blueprints/unittest.py b/tests/nanocontracts/blueprints/unittest.py new file mode 100644 index 000000000..c913f9a92 --- /dev/null +++ b/tests/nanocontracts/blueprints/unittest.py @@ -0,0 +1,163 @@ +from io import TextIOWrapper +from os import PathLike + +from hathor.conf.settings import HATHOR_TOKEN_UID +from hathor.crypto.util import decode_address +from hathor.manager import HathorManager +from hathor.nanocontracts import Context +from hathor.nanocontracts.blueprint import Blueprint +from hathor.nanocontracts.blueprint_env 
import BlueprintEnvironment +from hathor.nanocontracts.nc_exec_logs import NCLogConfig +from hathor.nanocontracts.on_chain_blueprint import Code, OnChainBlueprint +from hathor.nanocontracts.storage import NCBlockStorage, NCMemoryStorageFactory +from hathor.nanocontracts.storage.backends import MemoryNodeTrieStore +from hathor.nanocontracts.storage.patricia_trie import PatriciaTrie +from hathor.nanocontracts.types import Address, BlueprintId, ContractId, NCAction, TokenUid, VertexId +from hathor.nanocontracts.vertex_data import VertexData +from hathor.transaction import BaseTransaction, Transaction +from hathor.util import not_none +from hathor.verification.on_chain_blueprint_verifier import OnChainBlueprintVerifier +from hathor.wallet import KeyPair +from tests import unittest +from tests.nanocontracts.utils import TestRunner + + +class BlueprintTestCase(unittest.TestCase): + def setUp(self): + super().setUp() + self.manager = self.build_manager() + self.rng = self.manager.rng + self.wallet = self.manager.wallet + self.reactor = self.manager.reactor + self.nc_catalog = self.manager.tx_storage.nc_catalog + + self.htr_token_uid = HATHOR_TOKEN_UID + self.runner = self.build_runner() + self.now = int(self.reactor.seconds()) + + self._token_index = 1 + + def build_manager(self) -> HathorManager: + """Create a HathorManager instance.""" + return self.create_peer('unittests', nc_indexes=True, nc_log_config=NCLogConfig.FAILED, wallet_index=True) + + def get_readonly_contract(self, contract_id: ContractId) -> Blueprint: + """ Returns a read-only instance of a given contract to help testing it. + + The returned instance cannot be used for writing, use `get_readwrite_contract` if you need to test writing. + """ + return self._get_contract_instance(contract_id, locked=True) + + def get_readwrite_contract(self, contract_id: ContractId) -> Blueprint: + """ Returns a read-write instance of a given contract to help testing it. 
+ + The returned instance can be used to write attributes, if you don't need to write anything it is recommended to + use `get_readonly_contract` instead to avoid accidental writes. + """ + return self._get_contract_instance(contract_id, locked=False) + + def _get_contract_instance(self, contract_id: ContractId, *, locked: bool) -> Blueprint: + """ Implementation of `get_readonly_contract` and `get_readwrite_contract`, only difference is `locked` + """ + from hathor.nanocontracts.nc_exec_logs import NCLogger + runner = self.runner + contract_storage = runner.get_storage(contract_id) + if locked: + contract_storage.lock() + else: + contract_storage.unlock() + nc_logger = NCLogger(__reactor__=runner.reactor, __nc_id__=contract_id) + env = BlueprintEnvironment(runner, nc_logger, contract_storage, disable_cache=True) + blueprint_id = runner.get_blueprint_id(contract_id) + blueprint_class = runner.tx_storage.get_blueprint_class(blueprint_id) + contract = blueprint_class(env) + return contract + + def _register_blueprint_class( + self, + blueprint_class: type[Blueprint], + blueprint_id: BlueprintId | None = None, + ) -> BlueprintId: + """Register a blueprint class with an optional id, allowing contracts to be created from it.""" + if blueprint_id is None: + blueprint_id = self.gen_random_blueprint_id() + + assert blueprint_id not in self.nc_catalog.blueprints + self.nc_catalog.blueprints[blueprint_id] = blueprint_class + return blueprint_id + + def register_blueprint_file(self, path: PathLike[str], blueprint_id: BlueprintId | None = None) -> BlueprintId: + """Register a blueprint file with an optional id, allowing contracts to be created from it.""" + with open(path, 'r') as f: + return self.register_blueprint_contents(f, blueprint_id) + + def register_blueprint_contents( + self, + contents: TextIOWrapper, + blueprint_id: BlueprintId | None = None, + ) -> BlueprintId: + """Register blueprint contents with an optional id, allowing contracts to be created from it.""" + 
code = Code.from_python_code(contents.read(), self._settings) + verifier = OnChainBlueprintVerifier(settings=self._settings) + ocb = OnChainBlueprint(hash=b'', code=code) + verifier.verify_code(ocb) + + return self._register_blueprint_class(ocb.get_blueprint_class(), blueprint_id) + + def build_runner(self) -> TestRunner: + """Create a Runner instance.""" + nc_storage_factory = NCMemoryStorageFactory() + store = MemoryNodeTrieStore() + block_trie = PatriciaTrie(store) + block_storage = NCBlockStorage(block_trie) + return TestRunner( + self.manager.tx_storage, nc_storage_factory, block_storage, settings=self._settings, reactor=self.reactor + ) + + def gen_random_token_uid(self) -> TokenUid: + """Generate a random token UID (32 bytes).""" + token = self._token_index.to_bytes(32, byteorder='big', signed=False) + self._token_index += 1 + return TokenUid(token) + + def gen_random_address(self) -> Address: + """Generate a random wallet address.""" + address, _ = self.gen_random_address_with_key() + return address + + def gen_random_address_with_key(self) -> tuple[Address, KeyPair]: + """Generate a random wallet address with its key.""" + password = self.rng.randbytes(12) + key = KeyPair.create(password) + address_b58 = key.address + address_bytes = decode_address(not_none(address_b58)) + return Address(address_bytes), key + + def gen_random_contract_id(self) -> ContractId: + """Generate a random contract id.""" + return ContractId(VertexId(self.rng.randbytes(32))) + + def gen_random_blueprint_id(self) -> BlueprintId: + """Generate a random blueprint id.""" + return BlueprintId(self.rng.randbytes(32)) + + def get_genesis_tx(self) -> Transaction: + """Return a genesis transaction.""" + genesis = self.manager.tx_storage.get_all_genesis() + tx = list(tx for tx in genesis if isinstance(tx, Transaction))[0] + return tx + + def create_context( + self, + actions: list[NCAction] | None = None, + vertex: BaseTransaction | VertexData | None = None, + address: Address | None = None, 
+ timestamp: int | None = None, + ) -> Context: + """Create a Context instance with optional values or defaults.""" + return Context( + actions=actions if actions is not None else [], + vertex=vertex or self.get_genesis_tx(), + address=address or self.gen_random_address(), + timestamp=timestamp or self.now, + ) diff --git a/tests/nanocontracts/fields/__init__.py b/tests/nanocontracts/fields/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/nanocontracts/fields/test_compound_field.py b/tests/nanocontracts/fields/test_compound_field.py new file mode 100644 index 000000000..068f7824b --- /dev/null +++ b/tests/nanocontracts/fields/test_compound_field.py @@ -0,0 +1,71 @@ +from hathor.nanocontracts import Blueprint, Context, public +from hathor.nanocontracts.catalog import NCBlueprintCatalog +from hathor.nanocontracts.nc_types import TupleNCType, VarInt32NCType +from hathor.transaction import Block, Transaction +from tests import unittest +from tests.dag_builder.builder import TestDAGBuilder + +INT_VARTUPLE_NC_TYPE = TupleNCType(VarInt32NCType()) + + +class BlueprintWithCompoundField(Blueprint): + dc: dict[str, list[int]] + + @public + def initialize(self, ctx: Context) -> None: + assert self.dc.get('foo', []) == [] + self.dc['foo'] = [1, 2, 3] + self.dc['bar'] = [4, 5, 6, 7] + assert self.dc['foo'] == [1, 2, 3] + assert self.dc['bar'] == [4, 5, 6, 7] + del self.dc['foo'] + try: + self.dc['foo'] + except KeyError as e: + assert e.args[0] == b'dc:\x03foo' + assert 'foo' not in self.dc + assert 'bar' in self.dc + + +class TestDictField(unittest.TestCase): + def setUp(self) -> None: + super().setUp() + self.manager = self.create_peer('unittests') + self.bp_dict = b'1' * 32 + self.manager.tx_storage.nc_catalog = NCBlueprintCatalog({ + self.bp_dict: BlueprintWithCompoundField, + }) + + def test_dict_field(self) -> None: + dag_builder = TestDAGBuilder.from_manager(self.manager) + artifacts = dag_builder.build_from_str(f''' + blockchain genesis 
b[1..12] + b10 < dummy + + nc1.nc_id = "{self.bp_dict.hex()}" + nc1.nc_method = initialize() + + nc1 <-- b11 + nc1 <-- b12 + ''') + artifacts.propagate_with(self.manager) + + b11, b12 = artifacts.get_typed_vertices(['b11', 'b12'], Block) + nc1, = artifacts.get_typed_vertices(['nc1'], Transaction) + + assert b11.get_metadata().voided_by is None + assert nc1.get_metadata().voided_by is None + assert nc1.get_metadata().first_block == b11.hash + + b11_storage = self.manager.get_nc_storage(b11, nc1.hash) + + with self.assertRaises(KeyError): + b11_storage.get_obj(b'dc:\x03foo', INT_VARTUPLE_NC_TYPE) + assert b11_storage.get_obj(b'dc:\x03bar', INT_VARTUPLE_NC_TYPE) == (4, 5, 6, 7) + + assert b12.get_metadata().voided_by is None + b12_storage = self.manager.get_nc_storage(b12, nc1.hash) + + with self.assertRaises(KeyError): + b12_storage.get_obj(b'dc:\x03foo', INT_VARTUPLE_NC_TYPE) + assert b12_storage.get_obj(b'dc:\x03bar', INT_VARTUPLE_NC_TYPE) == (4, 5, 6, 7) diff --git a/tests/nanocontracts/fields/test_deque_field.py b/tests/nanocontracts/fields/test_deque_field.py new file mode 100644 index 000000000..662391b6d --- /dev/null +++ b/tests/nanocontracts/fields/test_deque_field.py @@ -0,0 +1,159 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from collections import deque +from typing import cast + +from hathor.nanocontracts import Blueprint, Context, public +from hathor.nanocontracts.catalog import NCBlueprintCatalog +from hathor.nanocontracts.nc_types import VarInt32NCType +from hathor.transaction import Block, Transaction +from tests import unittest +from tests.dag_builder.builder import TestDAGBuilder + +INT_NC_TYPE = VarInt32NCType() + + +def _test1(dq: deque[int]) -> None: + assert list(dq) == [] + dq.append(1) + dq.appendleft(2) + dq.extend([3, 4]) + dq.extendleft([5, 6]) + assert list(dq) == [6, 5, 2, 1, 3, 4] + assert dq.pop() == 4 + assert dq.popleft() == 6 + assert list(dq) == [5, 2, 1, 3] + assert len(dq) == 4 + dq[1] = 22 + dq[-2] = 11 + assert dq[1] == 22 + assert dq[-2] == 11 + assert list(dq) == [5, 22, 11, 3] + + +def _test2(dq: deque[int]) -> None: + assert list(dq) == [5, 22, 11, 3] + dq.reverse() + assert list(dq) == [3, 11, 22, 5] + dq.append(111) + dq.appendleft(222) + dq.extend([333, 444]) + dq.extendleft([555, 666]) + assert list(dq) == [666, 555, 222, 3, 11, 22, 5, 111, 333, 444] + assert dq.pop() == 444 + assert dq.popleft() == 666 + assert list(dq) == [555, 222, 3, 11, 22, 5, 111, 333] + assert len(dq) == 8 + dq[1] = 2222 + dq[-2] = 1111 + assert dq[1] == 2222 + assert dq[-2] == 1111 + assert list(dq) == [555, 2222, 3, 11, 22, 5, 1111, 333] + + +class BlueprintWithDeque(Blueprint): + dq: deque[int] + + @public + def initialize(self, ctx: Context) -> None: + _test1(self.dq) + + @public + def test(self, ctx: Context) -> None: + _test2(self.dq) + + +class BlueprintWithList(Blueprint): + dq: list[int] + + @public + def initialize(self, ctx: Context) -> None: + _test1(cast(deque, self.dq)) + + @public + def test(self, ctx: Context) -> None: + _test2(cast(deque, self.dq)) + + +class TestDequeField(unittest.TestCase): + def setUp(self) -> None: + super().setUp() + self.manager = self.create_peer('unittests') + self.bp_deque = b'1' * 32 + self.bp_list = b'2' * 32 + 
self.manager.tx_storage.nc_catalog = NCBlueprintCatalog({ + self.bp_deque: BlueprintWithDeque, + self.bp_list: BlueprintWithList, + }) + + def _test_deque_field(self, bp_id: bytes) -> None: + dag_builder = TestDAGBuilder.from_manager(self.manager) + artifacts = dag_builder.build_from_str(f''' + blockchain genesis b[1..12] + b10 < dummy + + nc1.nc_id = "{bp_id.hex()}" + nc1.nc_method = initialize() + + nc2.nc_id = nc1 + nc2.nc_method = test() + + nc1 <-- b11 + nc1 <-- nc2 <-- b12 + ''') + artifacts.propagate_with(self.manager) + + b11, b12 = artifacts.get_typed_vertices(['b11', 'b12'], Block) + nc1, nc2 = artifacts.get_typed_vertices(['nc1', 'nc2'], Transaction) + + assert b11.get_metadata().voided_by is None + assert nc1.get_metadata().voided_by is None + assert nc1.get_metadata().first_block == b11.hash + + b11_storage = self.manager.get_nc_storage(b11, nc1.hash) + + with self.assertRaises(KeyError): + b11_storage.get_obj(b'dq:\x7d', INT_NC_TYPE) + assert b11_storage.get_obj(b'dq:\x7e', INT_NC_TYPE) == 5 + assert b11_storage.get_obj(b'dq:\x7f', INT_NC_TYPE) == 22 + assert b11_storage.get_obj(b'dq:\x00', INT_NC_TYPE) == 11 + assert b11_storage.get_obj(b'dq:\x01', INT_NC_TYPE) == 3 + with self.assertRaises(KeyError): + b11_storage.get_obj(b'dq:\x02', INT_NC_TYPE) + + assert b12.get_metadata().voided_by is None + assert nc2.get_metadata().voided_by is None + assert nc2.get_metadata().first_block == b12.hash + + b12_storage = self.manager.get_nc_storage(b12, nc1.hash) + + with self.assertRaises(KeyError): + b12_storage.get_obj(b'dq:\x7b', INT_NC_TYPE) + assert b12_storage.get_obj(b'dq:\x7c', INT_NC_TYPE) == 333 + assert b12_storage.get_obj(b'dq:\x7d', INT_NC_TYPE) == 1111 + assert b12_storage.get_obj(b'dq:\x7e', INT_NC_TYPE) == 5 + assert b12_storage.get_obj(b'dq:\x7f', INT_NC_TYPE) == 22 + assert b12_storage.get_obj(b'dq:\x00', INT_NC_TYPE) == 11 + assert b12_storage.get_obj(b'dq:\x01', INT_NC_TYPE) == 3 + assert b12_storage.get_obj(b'dq:\x02', INT_NC_TYPE) == 2222 + 
assert b12_storage.get_obj(b'dq:\x03', INT_NC_TYPE) == 555 + with self.assertRaises(KeyError): + b12_storage.get_obj(b'dq:\x04', INT_NC_TYPE) + + def test_deque_field_with_deque(self) -> None: + self._test_deque_field(self.bp_deque) + + def test_deque_field_with_list(self) -> None: + self._test_deque_field(self.bp_list) diff --git a/tests/nanocontracts/fields/test_set_field.py b/tests/nanocontracts/fields/test_set_field.py new file mode 100644 index 000000000..81c43ee07 --- /dev/null +++ b/tests/nanocontracts/fields/test_set_field.py @@ -0,0 +1,101 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from hathor.nanocontracts import Blueprint, Context, public +from hathor.nanocontracts.catalog import NCBlueprintCatalog +from hathor.nanocontracts.nc_types import VarInt32NCType +from hathor.transaction import Block, Transaction +from tests import unittest +from tests.dag_builder.builder import TestDAGBuilder + +INT_NC_TYPE = VarInt32NCType() + + +class MyBlueprint(Blueprint): + my_set: set[int] + + @public + def initialize(self, ctx: Context) -> None: + assert len(self.my_set) == 0 + self.my_set.add(1) + self.my_set.add(1) + self.my_set.update({1, 2, 3, 4, 5}) + assert len(self.my_set) == 5 + assert 1 in self.my_set + assert 5 in self.my_set + + @public + def test1(self, ctx: Context) -> None: + self.my_set.discard(1) + self.my_set.remove(5) + assert 1 not in self.my_set + assert 5 not in self.my_set + + +class TestDequeField(unittest.TestCase): + def setUp(self) -> None: + super().setUp() + self.manager = self.create_peer('unittests') + self.bp_id = b'x' * 32 + self.manager.tx_storage.nc_catalog = NCBlueprintCatalog({ + self.bp_id: MyBlueprint + }) + + def test_set_field(self) -> None: + dag_builder = TestDAGBuilder.from_manager(self.manager) + artifacts = dag_builder.build_from_str(f''' + blockchain genesis b[1..12] + b10 < dummy + + nc1.nc_id = "{self.bp_id.hex()}" + nc1.nc_method = initialize() + + nc2.nc_id = nc1 + nc2.nc_method = test1() + + nc1 <-- b11 + nc1 <-- nc2 <-- b12 + ''') + artifacts.propagate_with(self.manager) + + b11, b12 = artifacts.get_typed_vertices(['b11', 'b12'], Block) + nc1, nc2 = artifacts.get_typed_vertices(['nc1', 'nc2'], Transaction) + + assert b11.get_metadata().voided_by is None + assert nc1.get_metadata().voided_by is None + assert nc1.get_metadata().first_block == b11.hash + + b11_storage = self.manager.get_nc_storage(b11, nc1.hash) + + for i in range(1, 6): + assert b11_storage.get_obj(self._get_key(i), INT_NC_TYPE) == i + + for i in (0, 6): + assert not b11_storage.has_obj(self._get_key(i)) + + assert 
b12.get_metadata().voided_by is None + assert nc2.get_metadata().voided_by is None + assert nc2.get_metadata().first_block == b12.hash + + b12_storage = self.manager.get_nc_storage(b12, nc1.hash) + + for i in range(2, 5): + assert b12_storage.get_obj(self._get_key(i), INT_NC_TYPE) == i + + for i in (1, 5): + assert not b12_storage.has_obj(self._get_key(i)) + + @staticmethod + def _get_key(n: int) -> bytes: + return 'my_set:'.encode() + INT_NC_TYPE.to_bytes(n) diff --git a/tests/nanocontracts/fields/test_storage_deque.py b/tests/nanocontracts/fields/test_storage_deque.py new file mode 100644 index 000000000..17e4da952 --- /dev/null +++ b/tests/nanocontracts/fields/test_storage_deque.py @@ -0,0 +1,372 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from collections import deque + +import pytest + +from hathor.nanocontracts.fields.deque_field import DequeStorageContainer, _DequeMetadata +from hathor.nanocontracts.nc_types import Int32NCType, StrNCType +from tests.nanocontracts.fields.utils import MockNCStorage + +INT_NC_TYPE = Int32NCType() +STR_NC_TYPE = StrNCType() + + +def test_basic() -> None: + storage = MockNCStorage() + dq = DequeStorageContainer(storage, 'dq', INT_NC_TYPE) + + assert storage.store == {} + assert list(dq) == [] + assert dq.maxlen is None + + +def test_append() -> None: + storage = MockNCStorage() + dq = DequeStorageContainer(storage, 'dq', STR_NC_TYPE) + + dq.append('a') + dq.append('b') + + assert storage.store == { + b'dq:\x00': 'a', + b'dq:\x01': 'b', + b'dq:__metadata__': _DequeMetadata(first_index=0, length=2, reversed=False), + } + assert list(dq) == ['a', 'b'] + + dq.reverse() + dq.append('c') + + assert storage.store == { + b'dq:\x7f': 'c', + b'dq:\x00': 'a', + b'dq:\x01': 'b', + b'dq:__metadata__': _DequeMetadata(first_index=-1, length=3, reversed=True), + } + assert list(dq) == ['b', 'a', 'c'] + + +def test_appendleft() -> None: + storage = MockNCStorage() + dq = DequeStorageContainer(storage, 'dq', STR_NC_TYPE) + + dq.appendleft('a') + dq.appendleft('b') + + assert storage.store == { + b'dq:\x7e': 'b', + b'dq:\x7f': 'a', + b'dq:__metadata__': _DequeMetadata(first_index=-2, length=2, reversed=False), + } + assert list(dq) == ['b', 'a'] + + dq.reverse() + dq.appendleft('c') + + assert storage.store == { + b'dq:\x7e': 'b', + b'dq:\x7f': 'a', + b'dq:\x00': 'c', + b'dq:__metadata__': _DequeMetadata(first_index=-2, length=3, reversed=True), + } + assert list(dq) == ['c', 'a', 'b'] + + +def test_extend() -> None: + storage = MockNCStorage() + dq = DequeStorageContainer(storage, 'dq', INT_NC_TYPE) + + dq.extend([1, 2, 3]) + + assert storage.store == { + b'dq:\x00': 1, + b'dq:\x01': 2, + b'dq:\x02': 3, + b'dq:__metadata__': _DequeMetadata(first_index=0, length=3, reversed=False), + 
} + assert list(dq) == [1, 2, 3] + + dq.reverse() + dq.extend([4, 5]) + + assert storage.store == { + b'dq:\x7e': 5, + b'dq:\x7f': 4, + b'dq:\x00': 1, + b'dq:\x01': 2, + b'dq:\x02': 3, + b'dq:__metadata__': _DequeMetadata(first_index=-2, length=5, reversed=True), + } + assert list(dq) == [3, 2, 1, 4, 5] + + py_dq: deque[int] = deque() + py_dq.extend([1, 2, 3]) + py_dq.reverse() + py_dq.extend([4, 5]) + assert list(py_dq) == list(dq) + + +def test_extendleft() -> None: + storage = MockNCStorage() + dq = DequeStorageContainer(storage, 'dq', INT_NC_TYPE) + + dq.extendleft([1, 2, 3]) + + assert storage.store == { + b'dq:\x7d': 3, + b'dq:\x7e': 2, + b'dq:\x7f': 1, + b'dq:__metadata__': _DequeMetadata(first_index=-3, length=3, reversed=False), + } + assert list(dq) == [3, 2, 1] + + dq.reverse() + dq.extendleft([4, 5]) + + assert storage.store == { + b'dq:\x7d': 3, + b'dq:\x7e': 2, + b'dq:\x7f': 1, + b'dq:\x00': 4, + b'dq:\x01': 5, + b'dq:__metadata__': _DequeMetadata(first_index=-3, length=5, reversed=True), + } + assert list(dq) == [5, 4, 1, 2, 3] + + py_dq: deque[int] = deque() + py_dq.extendleft([1, 2, 3]) + py_dq.reverse() + py_dq.extendleft([4, 5]) + assert list(py_dq) == list(dq) + + +def test_pop() -> None: + storage = MockNCStorage() + dq = DequeStorageContainer(storage, 'dq', INT_NC_TYPE) + dq.extend([1, 2, 3, 4]) + + assert dq.pop() == 4 + assert storage.store == { + b'dq:\x00': 1, + b'dq:\x01': 2, + b'dq:\x02': 3, + b'dq:__metadata__': _DequeMetadata(first_index=0, length=3, reversed=False), + } + + assert dq.pop() == 3 + assert storage.store == { + b'dq:\x00': 1, + b'dq:\x01': 2, + b'dq:__metadata__': _DequeMetadata(first_index=0, length=2, reversed=False), + } + + dq.reverse() + + assert dq.pop() == 1 + assert storage.store == { + b'dq:\x01': 2, + b'dq:__metadata__': _DequeMetadata(first_index=1, length=1, reversed=True), + } + + # popping the last element resets the deque + assert dq.pop() == 2 + assert storage.store == {} + + with 
pytest.raises(IndexError): + dq.pop() + + +def test_popleft() -> None: + storage = MockNCStorage() + dq = DequeStorageContainer(storage, 'dq', INT_NC_TYPE) + dq.extend([1, 2, 3, 4]) + + assert dq.popleft() == 1 + assert storage.store == { + b'dq:\x01': 2, + b'dq:\x02': 3, + b'dq:\x03': 4, + b'dq:__metadata__': _DequeMetadata(first_index=1, length=3, reversed=False), + } + + assert dq.popleft() == 2 + assert storage.store == { + b'dq:\x02': 3, + b'dq:\x03': 4, + b'dq:__metadata__': _DequeMetadata(first_index=2, length=2, reversed=False), + } + + dq.reverse() + + assert dq.popleft() == 4 + assert storage.store == { + b'dq:\x02': 3, + b'dq:__metadata__': _DequeMetadata(first_index=2, length=1, reversed=True), + } + + # popping the last element resets the deque + assert dq.popleft() == 3 + assert storage.store == {} + + with pytest.raises(IndexError): + dq.popleft() + + +def test_reverse() -> None: + storage = MockNCStorage() + + dq = DequeStorageContainer(storage, 'dq', STR_NC_TYPE) + dq.extend(['a', 'b', 'c']) + + assert storage.store == { + b'dq:\x00': 'a', + b'dq:\x01': 'b', + b'dq:\x02': 'c', + b'dq:__metadata__': _DequeMetadata(first_index=0, length=3, reversed=False), + } + assert list(dq) == ['a', 'b', 'c'] + + dq.reverse() + + assert storage.store == { + b'dq:\x00': 'a', + b'dq:\x01': 'b', + b'dq:\x02': 'c', + b'dq:__metadata__': _DequeMetadata(first_index=0, length=3, reversed=True), + } + assert list(dq) == ['c', 'b', 'a'] + + +def test_indexing() -> None: + storage = MockNCStorage() + dq = DequeStorageContainer(storage, 'dq', STR_NC_TYPE) + + dq.extend(['a', 'b', 'c', 'd']) + + assert storage.store == { + b'dq:\x00': 'a', + b'dq:\x01': 'b', + b'dq:\x02': 'c', + b'dq:\x03': 'd', + b'dq:__metadata__': _DequeMetadata(first_index=0, length=4, reversed=False), + } + assert dq[0] == 'a' + assert dq[1] == 'b' + assert dq[2] == 'c' + assert dq[3] == 'd' + + with pytest.raises(IndexError): + _ = dq[4] + + assert dq[-1] == 'd' + assert dq[-2] == 'c' + assert dq[-3] 
== 'b' + assert dq[-4] == 'a' + + with pytest.raises(IndexError): + _ = dq[-5] + + dq[1] = 'changed1' + dq[-2] = 'changed2' + + with pytest.raises(IndexError): + dq[4] = 'error' + + with pytest.raises(IndexError): + dq[-5] = 'error' + + assert storage.store == { + b'dq:\x00': 'a', + b'dq:\x01': 'changed1', + b'dq:\x02': 'changed2', + b'dq:\x03': 'd', + b'dq:__metadata__': _DequeMetadata(first_index=0, length=4, reversed=False), + } + assert dq[1] == 'changed1' + assert dq[-2] == 'changed2' + + with pytest.raises(IndexError): + dq[4] = 'error' + + with pytest.raises(IndexError): + dq[-5] = 'error' + + +def test_indexing_reversed() -> None: + storage = MockNCStorage() + dq = DequeStorageContainer(storage, 'dq', STR_NC_TYPE) + + dq.extend(['a', 'b', 'c', 'd']) + dq.reverse() + + assert storage.store == { + b'dq:\x00': 'a', + b'dq:\x01': 'b', + b'dq:\x02': 'c', + b'dq:\x03': 'd', + b'dq:__metadata__': _DequeMetadata(first_index=0, length=4, reversed=True), + } + assert dq[0] == 'd' + assert dq[1] == 'c' + assert dq[2] == 'b' + assert dq[3] == 'a' + + with pytest.raises(IndexError): + _ = dq[4] + + assert dq[-1] == 'a' + assert dq[-2] == 'b' + assert dq[-3] == 'c' + assert dq[-4] == 'd' + + with pytest.raises(IndexError): + _ = dq[-5] + + dq[1] = 'changed1' + dq[-2] = 'changed2' + + assert storage.store == { + b'dq:\x00': 'a', + b'dq:\x01': 'changed2', + b'dq:\x02': 'changed1', + b'dq:\x03': 'd', + b'dq:__metadata__': _DequeMetadata(first_index=0, length=4, reversed=True), + } + assert dq[1] == 'changed1' + assert dq[-2] == 'changed2' + + +def test_len() -> None: + storage = MockNCStorage() + dq = DequeStorageContainer(storage, 'dq', STR_NC_TYPE) + assert len(dq) == 0 + + dq.append('a') + assert len(dq) == 1 + + dq.append('b') + assert len(dq) == 2 + + dq.reverse() + assert len(dq) == 2 + + +def test_reverse_empty() -> None: + storage = MockNCStorage() + dq = DequeStorageContainer(storage, 'dq', INT_NC_TYPE) + assert list(dq) == [] + dq.reverse() + assert list(dq) == [] 
diff --git a/tests/nanocontracts/fields/test_storage_set.py b/tests/nanocontracts/fields/test_storage_set.py new file mode 100644 index 000000000..2253fd2b8 --- /dev/null +++ b/tests/nanocontracts/fields/test_storage_set.py @@ -0,0 +1,113 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Any + +import pytest + +from hathor.nanocontracts.fields.set_field import SetStorageContainer +from hathor.nanocontracts.nc_types import Int32NCType +from tests.nanocontracts.fields.utils import MockNCStorage + +_INT_NC_TYPE = Int32NCType() + + +def test_basic() -> None: + storage = MockNCStorage() + my_set = SetStorageContainer(storage, 'my_set', _INT_NC_TYPE) + + assert len(my_set) == 0 + assert storage.store == {} + + +def test_add_remove_discard() -> None: + storage = MockNCStorage() + my_set = SetStorageContainer(storage, 'my_set', _INT_NC_TYPE) + + my_set.add(1) + my_set.add(1) + my_set.add(2) + assert _get_values(storage) == {1, 2} + assert len(my_set) == 2 + + my_set.remove(1) + assert _get_values(storage) == {2} + assert len(my_set) == 1 + + my_set.discard(2) + assert _get_values(storage) == set() + assert len(my_set) == 0 + + my_set.discard(1) + with pytest.raises(KeyError): + my_set.remove(1) + + +def test_updates_and_contains() -> None: + storage = MockNCStorage() + my_set = SetStorageContainer(storage, 'my_set', _INT_NC_TYPE) + + my_set.update({1, 2, 3}, [2, 3, 4]) + assert _get_values(storage) == {1, 2, 3, 4} + assert 
len(my_set) == 4 + assert 0 not in my_set + assert 1 in my_set + assert 2 in my_set + assert 3 in my_set + assert 4 in my_set + assert 5 not in my_set + + my_set.difference_update({1, 3}, [4]) + assert _get_values(storage) == {2} + assert len(my_set) == 1 + + +def test_isdisjoint() -> None: + storage = MockNCStorage() + my_set = SetStorageContainer(storage, 'my_set', _INT_NC_TYPE) + my_set.update({1, 2, 3}) + + assert my_set.isdisjoint(set()) + assert my_set.isdisjoint({4, 5, 6}) + assert my_set.isdisjoint({0, 10}) + assert not my_set.isdisjoint({0, 1, 10, 20}) + assert not my_set.isdisjoint({3}) + + +def test_issuperset() -> None: + storage = MockNCStorage() + my_set = SetStorageContainer(storage, 'my_set', _INT_NC_TYPE) + my_set.update({1, 2, 3}) + + assert my_set.issuperset({}) + assert my_set.issuperset({1}) + assert my_set.issuperset({1, 2}) + assert my_set.issuperset({1, 2, 3}) + assert not my_set.issuperset({1, 2, 3, 4}) + + +def test_intersection() -> None: + storage = MockNCStorage() + my_set = SetStorageContainer(storage, 'my_set', _INT_NC_TYPE) + my_set.update({1, 2, 3}) + + assert my_set.intersection(set()) == set() + assert my_set.intersection({1}) == {1} + assert my_set.intersection({1, 2}) == {1, 2} + assert my_set.intersection({1, 2, 3}) == {1, 2, 3} + assert my_set.intersection({1, 2, 3, 4}) == {1, 2, 3} + + +def _get_values(storage: MockNCStorage) -> set[Any]: + return set(value for key, value in storage.store.items() if key != b'my_set:__length__') diff --git a/tests/nanocontracts/fields/utils.py b/tests/nanocontracts/fields/utils.py new file mode 100644 index 000000000..9f900a66c --- /dev/null +++ b/tests/nanocontracts/fields/utils.py @@ -0,0 +1,51 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Any, TypeVar + +from typing_extensions import override + +from hathor.nanocontracts.nc_types import NCType +from hathor.nanocontracts.storage import NCContractStorage +from hathor.nanocontracts.storage.types import _NOT_PROVIDED + +T = TypeVar('T') +D = TypeVar('D') + + +class MockNCStorage(NCContractStorage): + __slots__ = ('store',) + + def __init__(self) -> None: + self.store: dict[bytes, Any] = {} + + @override + def get_obj(self, key: bytes, value: NCType[T], *, default: D = _NOT_PROVIDED) -> T | D: + if item := self.store.get(key, default): + return item + if default is _NOT_PROVIDED: + raise KeyError + return default + + @override + def put_obj(self, key: bytes, value: NCType[T], data: T) -> None: + self.store[key] = data + + @override + def del_obj(self, key: bytes) -> None: + del self.store[key] + + @override + def has_obj(self, key: bytes) -> bool: + return key in self.store diff --git a/tests/nanocontracts/on_chain_blueprints/__init__.py b/tests/nanocontracts/on_chain_blueprints/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/nanocontracts/on_chain_blueprints/bomb.zlib b/tests/nanocontracts/on_chain_blueprints/bomb.zlib new file mode 100644 index 000000000..5fe4707b4 Binary files /dev/null and b/tests/nanocontracts/on_chain_blueprints/bomb.zlib differ diff --git a/tests/nanocontracts/on_chain_blueprints/test_bet.py b/tests/nanocontracts/on_chain_blueprints/test_bet.py new file mode 100644 index 000000000..03bdf5043 --- /dev/null +++ b/tests/nanocontracts/on_chain_blueprints/test_bet.py @@ -0,0 
+1,298 @@ +import os +import re +from typing import Any, NamedTuple, Optional + +from hathor.conf import HathorSettings +from hathor.crypto.util import decode_address, get_address_b58_from_public_key_bytes +from hathor.nanocontracts import OnChainBlueprint +from hathor.nanocontracts.context import Context +from hathor.nanocontracts.nc_types import NCType, make_nc_type_for_arg_type as make_nc_type +from hathor.nanocontracts.types import ( + NC_INITIALIZE_METHOD, + Address, + Amount, + ContractId, + NCDepositAction, + NCWithdrawalAction, + SignedData, + Timestamp, + TokenUid, + TxOutputScript, + VertexId, +) +from hathor.nanocontracts.utils import load_builtin_blueprint_for_ocb, sign_pycoin +from hathor.simulator.utils import add_new_blocks +from hathor.transaction import BaseTransaction, Transaction +from hathor.transaction.scripts import P2PKH +from hathor.util import initialize_hd_wallet, not_none +from hathor.wallet import KeyPair +from tests import unittest + +from ...utils import DEFAULT_WORDS +from .. 
import test_blueprints +from .utils import get_ocb_private_key + +settings = HathorSettings() + +ON_CHAIN_BET_NC_CODE: str = load_builtin_blueprint_for_ocb('bet.py', 'Bet', test_blueprints) +TX_OUTPUT_SCRIPT_NC_TYPE = make_nc_type(TxOutputScript) +RESULT_NC_TYPE: NCType[str | None] = make_nc_type(str | None) # type: ignore[arg-type] +TIMESTAMP_NC_TYPE = make_nc_type(Timestamp) +TOKEN_UID_NC_TYPE = make_nc_type(TokenUid) + + +class BetInfo(NamedTuple): + key: KeyPair + address: Address + amount: Amount + score: str + + +class OnChainBetBlueprintTestCase(unittest.TestCase): + def setUp(self) -> None: + super().setUp() + self.manager = self.create_peer('unittests') + self.wallet = initialize_hd_wallet(DEFAULT_WORDS) + self.token_uid = TokenUid(settings.HATHOR_TOKEN_UID) + self.initialize_contract() # will set self.nc_id, self.runner, self.nc_storage + + def _get_any_tx(self) -> BaseTransaction: + genesis = self.manager.tx_storage.get_all_genesis() + tx = [t for t in genesis if t.is_transaction][0] + return tx + + def _get_any_address(self) -> tuple[Address, KeyPair]: + password = os.urandom(12) + key = KeyPair.create(password) + address_b58 = key.address + address_bytes = Address(decode_address(not_none(address_b58))) + return address_bytes, key + + def get_current_timestamp(self) -> int: + return int(self.clock.seconds()) + + def _make_a_bet(self, amount: int, score: str, *, timestamp: Optional[int] = None) -> BetInfo: + (address_bytes, key) = self._get_any_address() + tx = self._get_any_tx() + action = NCDepositAction(token_uid=self.token_uid, amount=amount) + if timestamp is None: + timestamp = self.get_current_timestamp() + context = Context([action], tx, address_bytes, timestamp=timestamp) + self.runner.call_public_method(self.nc_id, 'bet', context, address_bytes, score) + return BetInfo(key=key, address=Address(address_bytes), amount=Amount(amount), score=score) + + def _set_result(self, result: str, oracle_key: Optional[KeyPair] = None) -> None: + signed_result 
= SignedData[str](result, b'') + + if oracle_key is None: + oracle_key = self.oracle_key + + result_bytes = signed_result.get_data_bytes(self.nc_id) + signed_result.script_input = oracle_key.p2pkh_create_input_data(b'123', result_bytes) + + tx = self._get_any_tx() + context = Context([], tx, Address(b''), timestamp=self.get_current_timestamp()) + self.runner.call_public_method(self.nc_id, 'set_result', context, signed_result) + final_result = self.nc_storage.get_obj(b'final_result', RESULT_NC_TYPE) + self.assertEqual(final_result, '2x2') + + def _withdraw(self, address: Address, amount: int) -> None: + tx = self._get_any_tx() + action = NCWithdrawalAction(token_uid=self.token_uid, amount=amount) + context = Context([action], tx, address, timestamp=self.get_current_timestamp()) + self.runner.call_public_method(self.nc_id, 'withdraw', context) + + def _create_on_chain_blueprint(self, nc_code: str) -> OnChainBlueprint: + from hathor.nanocontracts.on_chain_blueprint import Code + code = Code.from_python_code(nc_code, self._settings) + timestamp = self.manager.tx_storage.latest_timestamp + 1 + parents = self.manager.get_new_tx_parents(timestamp) + blueprint = OnChainBlueprint( + weight=1, + inputs=[], + outputs=[], + parents=parents, + storage=self.manager.tx_storage, + timestamp=timestamp, + code=code, + ) + blueprint.weight = self.manager.daa.minimum_tx_weight(blueprint) + blueprint.sign(get_ocb_private_key()) + self.manager.cpu_mining_service.resolve(blueprint) + self.manager.reactor.advance(2) + return blueprint + + def _gen_nc_initialize_tx(self, blueprint: OnChainBlueprint, nc_args: list[Any]) -> Transaction: + method_parser = blueprint.get_method(NC_INITIALIZE_METHOD) + timestamp = int(self.manager.reactor.seconds()) + parents = self.manager.get_new_tx_parents() + + nc = Transaction(timestamp=timestamp, parents=parents) + + nc_id = blueprint.blueprint_id() + nc_method = NC_INITIALIZE_METHOD + nc_args_bytes = method_parser.serialize_args_bytes(nc_args) + + # sign 
+ address = self.wallet.get_unused_address() + private_key = self.wallet.get_private_key(address) + + from hathor.transaction.headers import NanoHeader + nano_header = NanoHeader( + tx=nc, + nc_seqnum=1, + nc_id=nc_id, + nc_method=nc_method, + nc_args_bytes=nc_args_bytes, + nc_address=b'', + nc_script=b'', + nc_actions=[], + ) + nc.headers.append(nano_header) + + sign_pycoin(nano_header, private_key) + + # mine + nc.weight = self.manager.daa.minimum_tx_weight(nc) + self.manager.cpu_mining_service.resolve(nc) + + # advance + self.manager.reactor.advance(2) + return nc + + def initialize_contract(self) -> None: + # create on-chain Bet nanocontract + blueprint = self._create_on_chain_blueprint(ON_CHAIN_BET_NC_CODE) + + related_addresses = set(blueprint.get_related_addresses()) + address = get_address_b58_from_public_key_bytes(blueprint.nc_pubkey) + self.assertIn(address, related_addresses) + + assert self.manager.vertex_handler.on_new_relayed_vertex(blueprint) + add_new_blocks(self.manager, 1, advance_clock=30) # confirm the on-chain blueprint vertex + assert blueprint.get_metadata().first_block is not None + + self.oracle_key = KeyPair.create(b'123') + assert self.oracle_key.address is not None + self.oracle_script = P2PKH(self.oracle_key.address).get_script() + self.date_last_bet = self.get_current_timestamp() + 3600 * 24 + + # initialize an on-chain Bet nanocontract + nc_init_tx = self._gen_nc_initialize_tx(blueprint, [self.oracle_script, self.token_uid, self.date_last_bet]) + assert self.manager.vertex_handler.on_new_relayed_vertex(nc_init_tx) + block, = add_new_blocks(self.manager, 1, advance_clock=30) # confirm the initialization nc transaction + assert nc_init_tx.get_metadata().first_block is not None + + # set expected self objects: + self.nc_id = ContractId(VertexId(nc_init_tx.hash)) + self.runner = self.manager.get_nc_runner(block) + self.nc_storage = self.runner.get_storage(self.nc_id) + + def test_blueprint_initialization(self) -> None: + # if 
initialization was correct we should be able to observe these in the nc_storage: + self.assertEqual(self.nc_storage.get_obj(b'oracle_script', TX_OUTPUT_SCRIPT_NC_TYPE), self.oracle_script) + self.assertEqual(self.nc_storage.get_obj(b'token_uid', TOKEN_UID_NC_TYPE), self.token_uid) + self.assertEqual(self.nc_storage.get_obj(b'date_last_bet', TIMESTAMP_NC_TYPE), self.date_last_bet) + + def test_basic_flow(self) -> None: + runner = self.runner + + tx = self._get_any_tx() + + ### + # Make some bets. + ### + self._make_a_bet(100, '1x1') + self._make_a_bet(200, '1x1') + self._make_a_bet(300, '1x1') + bet1 = self._make_a_bet(500, '2x2') + + ### + # Set the final result. + ### + self._set_result('2x2') + + ### + # Single winner withdraws all funds. + ### + self.assertEqual(1100, runner.call_view_method(self.nc_id, 'get_max_withdrawal', bet1.address)) + + self._withdraw(bet1.address, 100) + self.assertEqual(1000, runner.call_view_method(self.nc_id, 'get_max_withdrawal', bet1.address)) + + self._withdraw(bet1.address, 1000) + self.assertEqual(0, runner.call_view_method(self.nc_id, 'get_max_withdrawal', bet1.address)) + + # Out of funds! Any withdrawal must fail from now on... 
+ amount = 1 + action = NCWithdrawalAction(token_uid=self.token_uid, amount=amount) + context = Context([action], tx, bet1.address, timestamp=self.get_current_timestamp()) + with self.assertNCFail('InsufficientBalance', 'withdrawal amount is greater than available (max: 0)'): + runner.call_public_method(self.nc_id, 'withdraw', context) + + def test_make_a_bet_with_withdrawal(self) -> None: + self._make_a_bet(100, '1x1') + + (address_bytes, _) = self._get_any_address() + tx = self._get_any_tx() + action = NCWithdrawalAction(token_uid=self.token_uid, amount=1) + context = Context([action], tx, address_bytes, timestamp=self.get_current_timestamp()) + score = '1x1' + with self.assertNCFail('NCForbiddenAction', 'action WITHDRAWAL is forbidden on method `bet`'): + self.runner.call_public_method(self.nc_id, 'bet', context, address_bytes, score) + + def test_make_a_bet_after_result(self) -> None: + self._make_a_bet(100, '1x1') + self._set_result('2x2') + with self.assertNCFail('ResultAlreadySet', ''): + self._make_a_bet(100, '1x1') + + def test_make_a_bet_after_date_last_bet(self) -> None: + with self.assertNCFail('TooLate', re.compile(r'cannot place bets after \d+')): + self._make_a_bet(100, '1x1', timestamp=self.date_last_bet + 1) + + def test_set_results_two_times(self) -> None: + self._set_result('2x2') + with self.assertNCFail('ResultAlreadySet', ''): + self._set_result('5x1') + + def test_set_results_wrong_signature(self) -> None: + wrong_oracle_key = KeyPair.create(b'123') + with self.assertNCFail('InvalidOracleSignature', ''): + self._set_result('3x2', oracle_key=wrong_oracle_key) + + def test_withdraw_before_result(self) -> None: + bet1 = self._make_a_bet(100, '1x1') + with self.assertNCFail('ResultNotAvailable', ''): + self._withdraw(bet1.address, 100) + + def test_withdraw_with_deposits(self) -> None: + (address_bytes, _) = self._get_any_address() + tx = self._get_any_tx() + action = NCDepositAction(token_uid=self.token_uid, amount=1) + context = 
Context([action], tx, address_bytes, timestamp=self.get_current_timestamp()) + with self.assertNCFail('NCForbiddenAction', 'action DEPOSIT is forbidden on method `withdraw`'): + self.runner.call_public_method(self.nc_id, 'withdraw', context) + + def test_make_a_bet_wrong_token(self) -> None: + + (address_bytes, _) = self._get_any_address() + tx = self._get_any_tx() + token_uid = TokenUid(b'xxx') + self.assertNotEqual(token_uid, self.token_uid) + action = NCDepositAction(token_uid=token_uid, amount=1) + context = Context([action], tx, address_bytes, timestamp=self.get_current_timestamp()) + score = '1x1' + with self.assertNCFail('InvalidToken', 'token different from 00'): + self.runner.call_public_method(self.nc_id, 'bet', context, address_bytes, score) + + def test_withdraw_wrong_token(self) -> None: + bet1 = self._make_a_bet(100, '1x1') + + tx = self._get_any_tx() + token_uid = TokenUid(b'xxx') + self.assertNotEqual(token_uid, self.token_uid) + action = NCWithdrawalAction(token_uid=token_uid, amount=1) + context = Context([action], tx, bet1.address, timestamp=self.get_current_timestamp()) + with self.assertNCFail('InvalidToken', 'token different from 00'): + self.runner.call_public_method(self.nc_id, 'withdraw', context) diff --git a/tests/nanocontracts/on_chain_blueprints/test_custom_builtins.py b/tests/nanocontracts/on_chain_blueprints/test_custom_builtins.py new file mode 100644 index 000000000..02bbda963 --- /dev/null +++ b/tests/nanocontracts/on_chain_blueprints/test_custom_builtins.py @@ -0,0 +1,198 @@ +import unittest +from builtins import range as builtin_range + +from hathor.nanocontracts.custom_builtins import custom_range + + +class TestCustomRange(unittest.TestCase): + def compare_ranges(self, custom, builtin): + self.assertEqual(list(custom), list(builtin)) + self.assertEqual(len(custom), len(builtin)) + self.assertEqual(custom.start, builtin.start) + self.assertEqual(custom.stop, builtin.stop) + self.assertEqual(custom.step, builtin.step) + + def 
test_single_argument(self): + custom = custom_range(5) + builtin = builtin_range(5) + self.compare_ranges(custom, builtin) + + def test_two_arguments(self): + custom = custom_range(1, 5) + builtin = builtin_range(1, 5) + self.compare_ranges(custom, builtin) + + def test_three_arguments(self): + custom = custom_range(1, 10, 2) + builtin = builtin_range(1, 10, 2) + self.compare_ranges(custom, builtin) + + def test_negative_step(self): + custom = custom_range(10, 1, -2) + builtin = builtin_range(10, 1, -2) + self.compare_ranges(custom, builtin) + + def test_empty_range(self): + cases = [(5, 5), (5, 5, -1), (5, 10, -1)] + for args in cases: + custom = custom_range(*args) + builtin = builtin_range(*args) + self.compare_ranges(custom, builtin) + + def test_len(self): + for args in [(5,), (1, 5), (1, 10, 2), (10, 1, -2)]: + custom = custom_range(*args) + builtin = builtin_range(*args) + self.assertEqual(len(custom), len(builtin)) + + def test_eq(self): + self.assertEqual(custom_range(5), custom_range(0, 5, 1)) + self.assertNotEqual(custom_range(5), custom_range(1, 5)) + self.assertNotEqual(custom_range(1, 10, 2), custom_range(1, 10, 3)) + + def test_contains(self): + custom = custom_range(1, 10, 2) + builtin = builtin_range(1, 10, 2) + for val in [3, 4, 9, 10]: + self.assertEqual(val in custom, val in builtin) + + def test_index(self): + custom = custom_range(1, 10, 2) + builtin = builtin_range(1, 10, 2) + for val in [3, 9]: + self.assertEqual(custom.index(val), builtin.index(val)) + with self.assertRaises(ValueError): + custom.index(4) + with self.assertRaises(ValueError): + builtin.index(4) + + def test_count(self): + custom = custom_range(1, 10, 2) + builtin = builtin_range(1, 10, 2) + for val in [3, 4, 9]: + self.assertEqual(custom.count(val), builtin.count(val)) + + def test_getitem(self): + custom = custom_range(1, 10, 2) + builtin = builtin_range(1, 10, 2) + for idx in [0, 1, -1]: + self.assertEqual(custom[idx], builtin[idx]) + with self.assertRaises(IndexError): + 
_ = custom[10] + with self.assertRaises(IndexError): + _ = builtin[10] + + def test_slice_getitem(self): + custom = custom_range(1, 10, 2) + builtin = builtin_range(1, 10, 2) + slices = [slice(1, 4), slice(None, None, 2), slice(None, None, -1)] + for sl in slices: + self.compare_ranges(custom[sl], builtin[sl]) + + def test_iter(self): + custom = custom_range(1, 5) + builtin = builtin_range(1, 5) + self.assertEqual(list(iter(custom)), list(iter(builtin))) + + def test_reversed(self): + custom = custom_range(1, 10, 2) + builtin = builtin_range(1, 10, 2) + self.assertEqual(list(reversed(custom)), list(reversed(builtin))) + + def test_invalid_arguments(self): + invalid_args = [(1.5,), (1, '10'), (1, 10, '2')] + for args in invalid_args: + with self.assertRaises(TypeError): + custom_range(*args) + with self.assertRaises(TypeError): + builtin_range(*args) + + def test_large_range(self): + # Very large range + custom = custom_range(0, 10**6, 2) + builtin = builtin_range(0, 10**6, 2) + self.assertEqual(len(custom), len(builtin)) + self.assertEqual(custom[-1], builtin[-1]) + + def test_large_negative_step(self): + # Large negative step + custom = custom_range(10**6, 0, -2) + builtin = builtin_range(10**6, 0, -2) + self.assertEqual(len(custom), len(builtin)) + self.assertEqual(custom[-1], builtin[-1]) + + def test_single_element_range(self): + # Single element ranges + custom = custom_range(5, 6) + builtin = builtin_range(5, 6) + self.assertEqual(list(custom), list(builtin)) + self.assertEqual(len(custom), len(builtin)) + + def test_single_element_negative_step(self): + # Single element with negative step + custom = custom_range(6, 5, -1) + builtin = builtin_range(6, 5, -1) + self.assertEqual(list(custom), list(builtin)) + self.assertEqual(len(custom), len(builtin)) + + def test_start_stop_equal(self): + # Start and stop are the same + custom = custom_range(5, 5) + builtin = builtin_range(5, 5) + self.assertEqual(list(custom), list(builtin)) + self.assertEqual(len(custom), 
len(builtin)) + + def test_step_larger_than_range(self): + # Step size larger than the range + custom = custom_range(1, 5, 10) + builtin = builtin_range(1, 5, 10) + self.assertEqual(list(custom), list(builtin)) + self.assertEqual(len(custom), len(builtin)) + + def test_reverse_single_step(self): + # Negative step with start and stop reversed by one step + custom = custom_range(1, -1, -1) + builtin = builtin_range(1, -1, -1) + self.assertEqual(list(custom), list(builtin)) + self.assertEqual(len(custom), len(builtin)) + + def test_index_out_of_bounds(self): + # Check handling of out-of-bounds indices + custom = custom_range(1, 10, 2) + with self.assertRaises(IndexError): + _ = custom[100] + with self.assertRaises(IndexError): + _ = custom[-100] + + def test_slice_with_large_step(self): + # Slicing with a large step + custom = custom_range(0, 100) + builtin = builtin_range(0, 100) + self.assertEqual(list(custom[::25]), list(builtin[::25])) + + def test_slice_out_of_bounds(self): + # Slicing out of bounds + custom = custom_range(0, 10) + builtin = builtin_range(0, 10) + self.assertEqual(list(custom[10:20]), list(builtin[10:20])) + self.assertEqual(list(custom[-20:-10]), list(builtin[-20:-10])) + + def test_reverse_entire_range(self): + # Reverse the entire range + custom = custom_range(1, 10) + builtin = builtin_range(1, 10) + self.assertEqual(list(reversed(custom)), list(reversed(builtin))) + + def test_step_one(self): + # Step of 1, which should produce a range identical to start-stop + custom = custom_range(1, 10, 1) + builtin = builtin_range(1, 10, 1) + self.assertEqual(list(custom), list(builtin)) + self.assertEqual(len(custom), len(builtin)) + + def test_zero_length_range(self): + # A range with zero length due to the starting conditions + custom = custom_range(10, 0) + builtin = builtin_range(10, 0) + self.assertEqual(list(custom), list(builtin)) + self.assertEqual(len(custom), len(builtin)) diff --git 
a/tests/nanocontracts/on_chain_blueprints/test_script_restrictions.py b/tests/nanocontracts/on_chain_blueprints/test_script_restrictions.py new file mode 100644 index 000000000..e277abe91 --- /dev/null +++ b/tests/nanocontracts/on_chain_blueprints/test_script_restrictions.py @@ -0,0 +1,217 @@ +import os + +from hathor.exception import InvalidNewTransaction +from hathor.nanocontracts import OnChainBlueprint +from hathor.nanocontracts.exception import OCBInvalidScript +from tests import unittest +from tests.nanocontracts.on_chain_blueprints.utils import get_ocb_private_key + + +def _load_file(filename: str) -> bytes: + cur_dir = os.path.dirname(__file__) + filepath = os.path.join(cur_dir, filename) + content = bytearray() + with open(filepath, 'rb') as nc_file: + for line in nc_file.readlines(): + content.extend(line) + return bytes(content) + + +ZLIB_BOMB: bytes = _load_file('bomb.zlib') + + +class OnChainBlueprintScriptTestCase(unittest.TestCase): + def setUp(self): + super().setUp() + self.manager = self.create_peer('unittests') + self.verification_service = self.manager.verification_service + + def _ocb_mine(self, blueprint: OnChainBlueprint) -> None: + self.manager.cpu_mining_service.resolve(blueprint) + self.manager.reactor.advance(2) + + def _create_on_chain_blueprint(self, nc_code: str) -> OnChainBlueprint: + from hathor.nanocontracts.on_chain_blueprint import Code + + code = Code.from_python_code(nc_code, self._settings) + timestamp = self.manager.tx_storage.latest_timestamp + 1 + parents = self.manager.get_new_tx_parents(timestamp) + blueprint = OnChainBlueprint( + weight=1, + inputs=[], + outputs=[], + parents=parents, + storage=self.manager.tx_storage, + timestamp=timestamp, + code=code, + ) + blueprint.weight = self.manager.daa.minimum_tx_weight(blueprint) + blueprint.sign(get_ocb_private_key()) + self._ocb_mine(blueprint) + return blueprint + + def _test_forbid_syntax(self, code: str, err_msg: str) -> None: + blueprint = 
self._create_on_chain_blueprint(code) + with self.assertRaises(InvalidNewTransaction) as cm: + self.manager.vertex_handler.on_new_relayed_vertex(blueprint) + assert isinstance(cm.exception.__cause__, OCBInvalidScript) + assert isinstance(cm.exception.__cause__.__cause__, SyntaxError) + assert cm.exception.args[0] == 'full validation failed: forbidden syntax' + assert cm.exception.__cause__.__cause__.args[0] == err_msg + + def test_forbid_import(self) -> None: + self._test_forbid_syntax( + 'import os', + 'Import statements are not allowed.', + ) + + def test_forbid_import_from(self) -> None: + self._test_forbid_syntax( + 'from os import path', + 'Importing from "os" is not allowed.', + ) + # XXX: only math.ceil and math.floor are currently allowed, log should error + self._test_forbid_syntax( + 'from math import log', + 'Importing "log" from "math" is not allowed.', + ) + + def test_forbid_try_except(self) -> None: + self._test_forbid_syntax( + 'try:\n ...\nexcept:\n ...', + 'Try/Except blocks are not allowed.', + ) + + def test_forbid_names_blacklist(self) -> None: + forbidden_cases = { + '__builtins__': [ + r'''x = __builtins__('dir')''', + r'''y = __builtins__.dir''', + ], + '__import__': [ + r'''sys = __import__('sys')''', + r'''os = __import__('os.path')''', + r'''path = __import__('os.path', fromlist=[None])''', + ], + 'compile': [ + r'''code = compile('print("foo")')''', + ], + 'delattr': [ + '''x = dict()\nx.foo = 1\ndelattr(x, 'foo')''', + ], + 'dir': [ + '''x = dir()''', + ], + 'eval': [ + '''x = eval('1+1')''', + ], + 'exec': [ + '''exec('x=1+1')''', + ], + 'getattr': [ + '''x = dict()\nx.foo = 1\ny = getattr(x, 'foo')''', + ], + 'globals': [ + '''x = 1\ny = globals()['x']''', + ], + 'hasattr': [ + '''x = dict()\ny = hasattr(x, 'foo')''', + ], + 'input': [ + '''x = input()''', + ], + 'locals': [ + '''x = 1\ny = locals()['x']''', + ], + 'open': [ + '''x = open('foo.txt')''', + ], + 'setattr': [ + '''x = dict()\nsetattr(x, 'foo', 1)''', + ], + 'vars': [ + 
'''x = vars()''', + ], + } + for attr, codes in forbidden_cases.items(): + for code in codes: + self._test_forbid_syntax(code, f'Usage or reference to {attr} is not allowed.') + + def test_forbid_internal_attr(self) -> None: + self._test_forbid_syntax( + 'x = 1\nx.__class__', + 'Access to internal attributes and methods is not allowed.', + ) + self._test_forbid_syntax( + 'x = 1\nx.__runner', + 'Access to internal attributes and methods is not allowed.', + ) + self._test_forbid_syntax( + 'x = 1\nx._Context__runner', + 'Access to internal attributes and methods is not allowed.', + ) + self._test_forbid_syntax( + 'x = log.__entries__', + 'Access to internal attributes and methods is not allowed.', + ) + + def test_forbid_async_fn(self) -> None: + self._test_forbid_syntax( + 'async def foo():\n ...', + 'Async functions are not allowed.', + ) + + def test_forbid_await_syntax(self) -> None: + # XXX: it is normally forbidden to use await outside an async context, and since async functions cannot be + # defined, it isn't possible to make a realistic code that will fail with await (also applies to other + # syntax nodes as'async for' and 'async with'), however the parser will normally accept this because it + # forms a valid syntax tree + self._test_forbid_syntax( + 'x = await foo()', + 'Await is not allowed.', + ) + self._test_forbid_syntax( + 'async for i in range(10):\n ...', + 'Async loops are not allowed.', + ) + self._test_forbid_syntax( + 'async with foo():\n ...', + 'Async contexts are not allowed.', + ) + + def test_blueprint_type_not_a_class(self) -> None: + blueprint = self._create_on_chain_blueprint('''__blueprint__ = "Bet"''') + with self.assertRaises(InvalidNewTransaction) as cm: + self.manager.vertex_handler.on_new_relayed_vertex(blueprint) + assert isinstance(cm.exception.__cause__, OCBInvalidScript) + assert cm.exception.args[0] == 'full validation failed: __blueprint__ is not a class' + + def test_blueprint_type_not_blueprint_subclass(self) -> None: + 
blueprint = self._create_on_chain_blueprint('''class Foo:\n ...\n__blueprint__ = Foo''') + with self.assertRaises(InvalidNewTransaction) as cm: + self.manager.vertex_handler.on_new_relayed_vertex(blueprint) + assert isinstance(cm.exception.__cause__, OCBInvalidScript) + assert cm.exception.args[0] == 'full validation failed: __blueprint__ is not a Blueprint subclass' + + def test_zlib_bomb(self) -> None: + from struct import error as StructError + + from hathor.nanocontracts.on_chain_blueprint import ON_CHAIN_BLUEPRINT_VERSION, CodeKind + from hathor.transaction.util import int_to_bytes + from hathor.transaction.vertex_parser import VertexParser + + blueprint = self._create_on_chain_blueprint('') + code = bytearray() + code.extend(int_to_bytes(ON_CHAIN_BLUEPRINT_VERSION, 1)) + code_type = bytes(CodeKind.PYTHON_ZLIB) + code.extend(int_to_bytes(len(ZLIB_BOMB) + len(code_type) + 1, 4)) + code.extend(code_type) + code.extend(ZLIB_BOMB) + blueprint.serialize_code = lambda: code # type: ignore[method-assign] + serialized_blueprint = bytes(blueprint) + parser = VertexParser(settings=self._settings) + with self.assertRaises(StructError) as cm: + _ = parser.deserialize(serialized_blueprint) + cause = cm.exception.__cause__ + self.assertIsInstance(cause, ValueError) + self.assertEqual(cause.args, ('Decompressed code is too long.',)) diff --git a/tests/nanocontracts/on_chain_blueprints/test_structure.py b/tests/nanocontracts/on_chain_blueprints/test_structure.py new file mode 100644 index 000000000..ee0d16752 --- /dev/null +++ b/tests/nanocontracts/on_chain_blueprints/test_structure.py @@ -0,0 +1,45 @@ +from hathor.conf.get_settings import get_global_settings +from hathor.nanocontracts import OnChainBlueprint +from hathor.nanocontracts.utils import load_builtin_blueprint_for_ocb + +from .. 
import test_blueprints +from .utils import get_ocb_private_key + +# XXX: ON_CHAIN_BET_NC_CODE is not imported from test_bet because test_bet will be refactored out +ON_CHAIN_BET_NC_CODE: str = load_builtin_blueprint_for_ocb('bet.py', 'Bet', test_blueprints) + + +def test_ocb_recompress(): + from hathor.nanocontracts.on_chain_blueprint import Code + from hathor.transaction.vertex_parser import VertexParser + + # XXX: explicitly compression level to confirm that parsing won't re-compress it, since it can't know the + # compression level when decompressing, it must keep the original and thus if it re-compressed it would not + # generate the same sequence + nc_code = ON_CHAIN_BET_NC_CODE + settings = get_global_settings() + # XXX: 3 should be more than enough to make a difference from the default (which is 9) + code = Code.from_python_code(nc_code, settings, compress_level=3) + code2 = Code.from_python_code(nc_code, settings) + # but just to make sure, we test it + assert code.data != code2.data, 'different compression level should yield different results' + ocb = OnChainBlueprint( + weight=1, + inputs=[], + outputs=[], + parents=[ + b'\x01' * 32, + b'\x02' * 32, + ], + timestamp=1234, + code=code, + ) + ocb.weight = 1.234 + ocb.sign(get_ocb_private_key()) + ocb.update_hash() + ocb_bytes = bytes(ocb) + parser = VertexParser(settings=settings) + ocb2 = parser.deserialize(ocb_bytes) + assert ocb == ocb2 + ocb_bytes2 = bytes(ocb2) + assert ocb_bytes == ocb_bytes2 diff --git a/tests/nanocontracts/on_chain_blueprints/utils.py b/tests/nanocontracts/on_chain_blueprints/utils.py new file mode 100644 index 000000000..38072582b --- /dev/null +++ b/tests/nanocontracts/on_chain_blueprints/utils.py @@ -0,0 +1,10 @@ +from cryptography.hazmat.primitives.asymmetric import ec + +from hathor.wallet import KeyPair +from tests import unittest + + +def get_ocb_private_key() -> ec.EllipticCurvePrivateKey: + """Return the private key used to sign on-chain blueprints on tests.""" + key = 
KeyPair(unittest.OCB_TEST_PRIVKEY) + return key.get_private_key(unittest.OCB_TEST_PASSWORD) diff --git a/tests/nanocontracts/test_actions.py b/tests/nanocontracts/test_actions.py new file mode 100644 index 000000000..ff165d710 --- /dev/null +++ b/tests/nanocontracts/test_actions.py @@ -0,0 +1,911 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import dataclasses +from typing import Any +from unittest.mock import patch + +import pytest + +from hathor.conf.settings import HATHOR_TOKEN_UID +from hathor.indexes.tokens_index import TokensIndex +from hathor.nanocontracts import NC_EXECUTION_FAIL_ID, Blueprint, Context, public +from hathor.nanocontracts.catalog import NCBlueprintCatalog +from hathor.nanocontracts.exception import NCInvalidAction +from hathor.nanocontracts.nc_exec_logs import NCLogConfig +from hathor.nanocontracts.storage.contract_storage import Balance, BalanceKey +from hathor.nanocontracts.types import NCActionType, TokenUid +from hathor.transaction import Block, Transaction, TxInput, TxOutput +from hathor.transaction.exceptions import InvalidToken +from hathor.transaction.headers.nano_header import NanoHeaderAction +from hathor.util import not_none +from hathor.verification.nano_header_verifier import MAX_ACTIONS_LEN +from hathor.wallet import HDWallet +from tests import unittest +from tests.dag_builder.builder import TestDAGBuilder +from tests.nanocontracts.utils import assert_nc_failure_reason, set_nano_header + + 
+class MyBlueprint(Blueprint): + @public(allow_deposit=True) + def initialize(self, ctx: Context) -> None: + pass + + @public(allow_actions=[ + NCActionType.DEPOSIT, + NCActionType.WITHDRAWAL, + NCActionType.GRANT_AUTHORITY, + NCActionType.ACQUIRE_AUTHORITY, + ]) + def nop(self, ctx: Context) -> None: + pass + + @public + def revoke(self, ctx: Context, token_uid: TokenUid, revoke_mint: bool, revoke_melt: bool) -> None: + self.syscall.revoke_authorities(token_uid=token_uid, revoke_mint=revoke_mint, revoke_melt=revoke_melt) + + @public(allow_deposit=True, allow_withdrawal=True, allow_grant_authority=True) + def mint(self, ctx: Context, token_uid: TokenUid, amount: int) -> None: + self.syscall.mint_tokens(token_uid, amount) + + @public(allow_deposit=True, allow_withdrawal=True) + def melt(self, ctx: Context, token_uid: TokenUid, amount: int) -> None: + self.syscall.melt_tokens(token_uid, amount) + + +class TestActions(unittest.TestCase): + """ + Tests in this file use a hybrid dag builder and manual approach. First, the dag builder is used to set up the + initial state and every vertex that we'll need. Then, we manually manipulate a tx's nano header adding the + required actions and changing inputs/outputs accordingly. + + The dag builder does not currently support authority actions. Even when it supports them, it's good to keep those + tests manual to make basic assertions without the implicitness of the dag builder. 
+ """ + + def setUp(self) -> None: + super().setUp() + + self.bp_id = b'1' * 32 + self.manager = self.create_peer('unittests', nc_log_config=NCLogConfig.FAILED, wallet_index=True) + self.manager.tx_storage.nc_catalog = NCBlueprintCatalog({ + self.bp_id: MyBlueprint + }) + assert self.manager.tx_storage.indexes is not None + self.tokens_index: TokensIndex = not_none(self.manager.tx_storage.indexes.tokens) + self.nc_seqnum = 0 + + self.dag_builder = TestDAGBuilder.from_manager(self.manager) + self.artifacts = self.dag_builder.build_from_str(f''' + blockchain genesis b[1..12] + + tx0.nc_id = "{self.bp_id.hex()}" + tx0.nc_method = initialize() + tx0.nc_deposit = 1000 HTR + tx0.nc_deposit = 1000 TKA + + # The fact that HTR is in index 0 and TKA is in index 1 is used by tests below. + tx1.out[0] = 10000 HTR + tx1.out[1] = 1000 TKA + + tx2.out[0] = 10000 HTR + tx2.out[1] = 1000 TKA + + b10 < dummy < TKA < tx0 + tx0 <-- tx1 <-- b11 + b11 < tx2 + tx1 <-- tx2 <-- b12 + ''') + + # We only propagate up to tx0. The rest is manipulated and propagated by each test. + self.artifacts.propagate_with(self.manager, up_to='tx0') + + self.b11, self.b12 = self.artifacts.get_typed_vertices(['b11', 'b12'], Block) + self.tx0, self.tx1, self.tx2, self.tka = self.artifacts.get_typed_vertices( + ['tx0', 'tx1', 'tx2', 'TKA'], + Transaction, + ) + + # We finish a manual setup of tx1, so it can be used directly in verification methods. + self.tx1.storage = self.manager.tx_storage + self.tx1.init_static_metadata_from_storage(self._settings, self.manager.tx_storage) + + # Just some constants. + self.htr_balance_key = BalanceKey(nc_id=self.tx0.hash, token_uid=HATHOR_TOKEN_UID) + self.tka_balance_key = BalanceKey(nc_id=self.tx0.hash, token_uid=self.tka.hash) + + # Initial state sanity check. 30 HTR are used to mint 3000 TKA. 
+ self.initial_htr_total = self._settings.GENESIS_TOKENS + 10 * self._settings.INITIAL_TOKENS_PER_BLOCK - 30 + self.initial_tka_total = 3000 + self._assert_token_index(htr_total=self.initial_htr_total, tka_total=self.initial_tka_total) + + def _set_nano_header( + self, + *, + tx: Transaction, + nc_actions: list[NanoHeaderAction] | None = None, + nc_method: str | None = None, + nc_args: tuple[Any, ...] | None = None, + ) -> None: + """Configure a nano header for a tx.""" + wallet = self.dag_builder.get_main_wallet() + assert isinstance(wallet, HDWallet) + set_nano_header( + tx=tx, + wallet=wallet, + nc_id=self.tx0.hash, + nc_actions=nc_actions, + nc_method=nc_method, + nc_args=nc_args, + blueprint=MyBlueprint, + seqnum=self.nc_seqnum + ) + self.nc_seqnum += 1 + + def _change_tx_balance( + self, + *, + tx: Transaction, + update_htr_output: int | None = None, + update_tka_output: int | None = None, + add_inputs: list[TxInput] | None = None, + add_outputs: list[TxOutput] | None = None, + ) -> None: + """ + Modify a tx by optionally changing its HTR and TKA output values, or adding new inputs and outputs, + then re-sign all input scripts. 
+ """ + if update_htr_output is not None: + out = tx.outputs[0] + assert tx.get_token_uid(out.get_token_index()) == HATHOR_TOKEN_UID, ( + 'expected HTR in output index 0' + ) + out.value += update_htr_output + + if update_tka_output is not None: + out = tx.outputs[1] + assert tx.get_token_uid(out.get_token_index()) == self.tka.hash, ( + 'expected TKA in output index 1' + ) + out.value += update_tka_output + + if add_inputs: + tx.inputs.extend(add_inputs) + + if add_outputs: + tx.outputs.extend(add_outputs) + + self.dag_builder._exporter.sign_all_inputs(tx) + + def _get_all_balances(self) -> dict[BalanceKey, Balance]: + return self.manager.get_best_block_nc_storage(self.tx0.hash).get_all_balances() + + def _create_tka_mint_input(self) -> TxInput: + """Return a new TxInput pointing to a TKA mint authority.""" + mint_index = len(self.tka.outputs) - 2 + mint_output: TxOutput = self.tka.outputs[mint_index] + token_uid = self.tka.get_token_uid(mint_output.get_token_index()) + assert token_uid == self.tka.hash and mint_output.can_mint_token(), ( + f'expected the dag builder to generate a mint authority in output index {mint_index}' + ) + return TxInput(tx_id=self.tka.hash, index=mint_index, data=b'') + + def _create_tka_melt_input(self) -> TxInput: + """Return a new TxInput pointing to a TKA melt authority.""" + melt_index = len(self.tka.outputs) - 1 + melt_output: TxOutput = self.tka.outputs[melt_index] + token_uid = self.tka.get_token_uid(melt_output.get_token_index()) + assert token_uid == self.tka.hash and melt_output.can_melt_token(), ( + f'expected the dag builder to generate a melt authority in output index {melt_index}' + ) + return TxInput(tx_id=self.tka.hash, index=melt_index, data=b'') + + def _assert_token_index(self, *, htr_total: int, tka_total: int) -> None: + assert self.tokens_index.get_token_info(HATHOR_TOKEN_UID).get_total() == htr_total + assert self.tokens_index.get_token_info(self.tka.hash).get_total() == tka_total + + def test_deposit_success(self) 
-> None: + # Add a DEPOSIT action and remove tokens from the HTR output accordingly. + self._change_tx_balance(tx=self.tx1, update_htr_output=-123) + self._set_nano_header(tx=self.tx1, nc_actions=[ + NanoHeaderAction(type=NCActionType.DEPOSIT, token_index=0, amount=123), + ]) + + # Execute tx1 + self.artifacts.propagate_with(self.manager, up_to='b11') + assert self.b11.get_metadata().voided_by is None + assert self.tx1.get_metadata().voided_by is None + assert self.tx1.get_metadata().first_block == self.b11.hash + + # Check that the nano contract balance is updated with the added tokens. + assert self._get_all_balances() == { + self.htr_balance_key: Balance(value=1123, can_mint=False, can_melt=False), + self.tka_balance_key: Balance(value=1000, can_mint=False, can_melt=False), + } + + # Check the token index. + self._assert_token_index( + htr_total=self.initial_htr_total + self._settings.INITIAL_TOKENS_PER_BLOCK, + tka_total=self.initial_tka_total, + ) + + def test_withdrawal_success(self) -> None: + # Add a WITHDRAWAL action and add tokens to the HTR output accordingly. + self._change_tx_balance(tx=self.tx1, update_htr_output=123) + self._set_nano_header(tx=self.tx1, nc_actions=[ + NanoHeaderAction(type=NCActionType.WITHDRAWAL, token_index=0, amount=123), + ]) + + # Execute tx1 + self.artifacts.propagate_with(self.manager, up_to='b11') + assert self.b11.get_metadata().voided_by is None + assert self.tx1.get_metadata().voided_by is None + assert self.tx1.get_metadata().first_block == self.b11.hash + + # Check that the nano contract balance is updated with the removed tokens. + assert self._get_all_balances() == { + self.htr_balance_key: Balance(value=877, can_mint=False, can_melt=False), + self.tka_balance_key: Balance(value=1000, can_mint=False, can_melt=False), + } + + # Check the token index. 
+ self._assert_token_index( + htr_total=self.initial_htr_total + self._settings.INITIAL_TOKENS_PER_BLOCK, + tka_total=self.initial_tka_total, + ) + + def test_grant_authority_mint_success(self) -> None: + # Add a GRANT_AUTHORITY action to mint TKA, and add a mint authority input accordingly. + self._change_tx_balance(tx=self.tx1, add_inputs=[self._create_tka_mint_input()]) + self._set_nano_header(tx=self.tx1, nc_actions=[ + NanoHeaderAction( + type=NCActionType.GRANT_AUTHORITY, token_index=1, amount=TxOutput.TOKEN_MINT_MASK + ), + ]) + + # Execute tx1 + self.artifacts.propagate_with(self.manager, up_to='b11') + assert self.b11.get_metadata().voided_by is None + assert self.tx1.get_metadata().voided_by is None + assert self.tx1.get_metadata().first_block == self.b11.hash + + # Check that the nano contract balance is updated with the mint authority. + assert self._get_all_balances() == { + self.htr_balance_key: Balance(value=1000, can_mint=False, can_melt=False), + self.tka_balance_key: Balance(value=1000, can_mint=True, can_melt=False), + } + + def test_grant_authority_melt_success(self) -> None: + # Add a GRANT_AUTHORITY action to melt TKA, and add a melt authority input accordingly. + self._change_tx_balance(tx=self.tx1, add_inputs=[self._create_tka_melt_input()]) + self._set_nano_header(tx=self.tx1, nc_actions=[ + NanoHeaderAction( + type=NCActionType.GRANT_AUTHORITY, token_index=1, amount=TxOutput.TOKEN_MELT_MASK + ), + ]) + + # Execute tx1 + self.artifacts.propagate_with(self.manager, up_to='b11') + assert self.b11.get_metadata().voided_by is None + assert self.tx1.get_metadata().voided_by is None + assert self.tx1.get_metadata().first_block == self.b11.hash + + # Check that the nano contract balance is updated with the melt authority. 
+ assert self._get_all_balances() == { + self.htr_balance_key: Balance(value=1000, can_mint=False, can_melt=False), + self.tka_balance_key: Balance(value=1000, can_mint=False, can_melt=True), + } + + def test_grant_authority_all_success(self) -> None: + # Add a GRANT_AUTHORITY action to both mint and melt TKA, and add authority inputs accordingly. + self._change_tx_balance( + tx=self.tx1, + add_inputs=[ + self._create_tka_mint_input(), + self._create_tka_melt_input(), + ] + ) + self._set_nano_header(tx=self.tx1, nc_actions=[ + NanoHeaderAction( + type=NCActionType.GRANT_AUTHORITY, token_index=1, amount=TxOutput.ALL_AUTHORITIES + ), + ]) + + # Execute tx1 + self.artifacts.propagate_with(self.manager, up_to='b11') + assert self.b11.get_metadata().voided_by is None + assert self.tx1.get_metadata().voided_by is None + assert self.tx1.get_metadata().first_block == self.b11.hash + + # Check that the nano contract balance is updated with both mint and melt authorities. + assert self._get_all_balances() == { + self.htr_balance_key: Balance(value=1000, can_mint=False, can_melt=False), + self.tka_balance_key: Balance(value=1000, can_mint=True, can_melt=True), + } + + def _test_acquire_authority_to_create_output(self, authority: int) -> None: + token_index = 1 + + # Add an ACQUIRE_AUTHORITY action for TKA, and add a new authority output accordingly, + # both with the provided `authority`. 
+ self._change_tx_balance( + tx=self.tx2, + add_outputs=[ + TxOutput(value=authority, script=b'', token_data=TxOutput.TOKEN_AUTHORITY_MASK | token_index) + ] + ) + self._set_nano_header(tx=self.tx2, nc_actions=[ + NanoHeaderAction( + type=NCActionType.ACQUIRE_AUTHORITY, token_index=1, amount=authority + ), + ]) + + # Execute tx2 + self.artifacts.propagate_with(self.manager, up_to='b12') + assert self.b12.get_metadata().voided_by is None + assert self.tx2.get_metadata().first_block == self.b12.hash + + def test_acquire_authority_create_mint_success(self) -> None: + # Grant a mint authority to the nano contract and use it to create a new mint authority output. + self.test_grant_authority_mint_success() + self._test_acquire_authority_to_create_output(TxOutput.TOKEN_MINT_MASK) + + # Check that tx2 successfully executes. + assert self.tx2.get_metadata().voided_by is None + + def test_acquire_authority_create_mint_nc_fail(self) -> None: + # Try to create a new mint authority output, but the contract doesn't have that authority. + self._test_acquire_authority_to_create_output(TxOutput.TOKEN_MINT_MASK) + + # Check that tx2 fails execution. + assert self.tx2.get_metadata().voided_by == {self.tx2.hash, NC_EXECUTION_FAIL_ID} + assert_nc_failure_reason( + manager=self.manager, + tx_id=self.tx2.hash, + block_id=self.b12.hash, + reason=f'NCInvalidAction: cannot acquire mint authority for token {self.tka.hash_hex}' + ) + + def test_acquire_authority_create_melt_success(self) -> None: + # Grant a melt authority to the nano contract and use it to create a new melt authority output. + self.test_grant_authority_melt_success() + self._test_acquire_authority_to_create_output(TxOutput.TOKEN_MELT_MASK) + + # Check that tx2 successfully executes. + assert self.tx2.get_metadata().voided_by is None + + def test_acquire_authority_create_melt_nc_fail(self) -> None: + # Try to create a new melt authority output, but the contract doesn't have that authority. 
+ self._test_acquire_authority_to_create_output(TxOutput.TOKEN_MELT_MASK) + + # Check that tx2 fails execution. + assert self.tx2.get_metadata().voided_by == {self.tx2.hash, NC_EXECUTION_FAIL_ID} + assert_nc_failure_reason( + manager=self.manager, + tx_id=self.tx2.hash, + block_id=self.b12.hash, + reason=f'NCInvalidAction: cannot acquire melt authority for token {self.tka.hash_hex}' + ) + + def test_acquire_authority_create_all_success(self) -> None: + # Grant all authorities to the nano contract and use it to create a new all authorities output. + self.test_grant_authority_all_success() + self._test_acquire_authority_to_create_output(TxOutput.ALL_AUTHORITIES) + + # Check that tx2 successfully executes. + assert self.tx2.get_metadata().voided_by is None + + def test_acquire_authority_create_all_nc_fail(self) -> None: + # Try to create a new all authorities output, but the contract doesn't have any authorities. + self._test_acquire_authority_to_create_output(TxOutput.ALL_AUTHORITIES) + + # Check that tx2 fails execution. + assert self.tx2.get_metadata().voided_by == {self.tx2.hash, NC_EXECUTION_FAIL_ID} + assert_nc_failure_reason( + manager=self.manager, + tx_id=self.tx2.hash, + block_id=self.b12.hash, + reason=f'NCInvalidAction: cannot acquire mint authority for token {self.tka.hash_hex}' + ) + + def test_acquire_authority_mint_tokens_success(self) -> None: + # Grant a mint authority to the nano contract and use it to mint tokens. + self.test_grant_authority_mint_success() + + # Add an ACQUIRE_AUTHORITY action for TKA, minting new TKA, and updating the HTR balance accordingly. 
+ self._change_tx_balance( + tx=self.tx2, + update_htr_output=-10, + update_tka_output=1000, + ) + self._set_nano_header(tx=self.tx2, nc_actions=[ + NanoHeaderAction( + type=NCActionType.ACQUIRE_AUTHORITY, token_index=1, amount=TxOutput.TOKEN_MINT_MASK + ), + ]) + + # Execute tx2 + self.artifacts.propagate_with(self.manager, up_to='b12') + assert self.b12.get_metadata().voided_by is None + assert self.tx2.get_metadata().first_block == self.b12.hash + + # Check that tx2 successfully executes. + assert self.tx2.get_metadata().voided_by is None + + def test_acquire_authority_melt_tokens_success(self) -> None: + # Grant a melt authority to the nano contract and use it to melt tokens. + self.test_grant_authority_melt_success() + + # Add an ACQUIRE_AUTHORITY action for TKA, melting TKA, and updating the HTR balance accordingly. + self._change_tx_balance( + tx=self.tx2, + update_htr_output=5, + update_tka_output=-500, + ) + self._set_nano_header(tx=self.tx2, nc_actions=[ + NanoHeaderAction( + type=NCActionType.ACQUIRE_AUTHORITY, token_index=1, amount=TxOutput.TOKEN_MELT_MASK + ), + ]) + + # Execute tx2 + self.artifacts.propagate_with(self.manager, up_to='b12') + assert self.b12.get_metadata().voided_by is None + assert self.tx2.get_metadata().first_block == self.b12.hash + + # Check that tx2 successfully executes. + assert self.tx2.get_metadata().voided_by is None + + def test_mint_tokens_success(self) -> None: + # Grant a TKA mint authority to the nano contract and then use it to mint tokens. + self.test_grant_authority_mint_success() + assert self._get_all_balances() == { + self.htr_balance_key: Balance(value=1000, can_mint=False, can_melt=False), + self.tka_balance_key: Balance(value=1000, can_mint=True, can_melt=False), + } + + # Add actions so both minted tokens and htr used to mint tokens are in/from the tx outputs/inputs. 
+ self._change_tx_balance(tx=self.tx2, update_htr_output=-200, update_tka_output=20000) + self._set_nano_header( + tx=self.tx2, + nc_actions=[ + NanoHeaderAction(type=NCActionType.WITHDRAWAL, token_index=1, amount=20000), + NanoHeaderAction(type=NCActionType.DEPOSIT, token_index=0, amount=200), + ], + nc_method='mint', + nc_args=(self.tka.hash, 20000), + ) + + # Execute tx2 + self.artifacts.propagate_with(self.manager, up_to='b12') + assert self.b12.get_metadata().voided_by is None + assert self.tx2.get_metadata().first_block == self.b12.hash + assert self.tx2.get_metadata().voided_by is None + + # Check that the nano contract balance is unchanged because both + # minted tokens and HTR used to mint in/were from tx outputs/inputs. + assert self._get_all_balances() == { + self.htr_balance_key: Balance(value=1000, can_mint=False, can_melt=False), + self.tka_balance_key: Balance(value=1000, can_mint=True, can_melt=False), + } + + # Check the token index. + self._assert_token_index( + htr_total=self.initial_htr_total + 2 * self._settings.INITIAL_TOKENS_PER_BLOCK - 200, + tka_total=self.initial_tka_total + 20000, + ) + + def test_grant_and_mint_same_tx_success(self) -> None: + # Add a GRANT_AUTHORITY action to mint TKA, and add a mint authority input accordingly. + # Also add a call to mint + self._change_tx_balance(tx=self.tx1, add_inputs=[self._create_tka_mint_input()]) + self._set_nano_header( + tx=self.tx1, + nc_actions=[ + NanoHeaderAction(type=NCActionType.GRANT_AUTHORITY, token_index=1, amount=TxOutput.TOKEN_MINT_MASK), + ], + nc_method='mint', + nc_args=(self.tka.hash, 200) + ) + + # Execute tx1 + self.artifacts.propagate_with(self.manager, up_to='b11') + assert self.b11.get_metadata().voided_by is None + assert self.tx1.get_metadata().voided_by is None + assert self.tx1.get_metadata().first_block == self.b11.hash + + # Check that the nano contract balance is updated with the mint authority. 
+ assert self._get_all_balances() == { + self.htr_balance_key: Balance(value=998, can_mint=False, can_melt=False), + self.tka_balance_key: Balance(value=1200, can_mint=True, can_melt=False), + } + + def test_mint_tokens_keep_in_contract_success(self) -> None: + # Grant a TKA mint authority to the nano contract and then use it to mint tokens. + self.test_grant_authority_mint_success() + assert self._get_all_balances() == { + self.htr_balance_key: Balance(value=1000, can_mint=False, can_melt=False), + self.tka_balance_key: Balance(value=1000, can_mint=True, can_melt=False), + } + + # Add a deposit action, paying for HTR with the input and keeping the minted token in the contract. + self._change_tx_balance(tx=self.tx2, update_htr_output=-200) + self._set_nano_header( + tx=self.tx2, + nc_actions=[NanoHeaderAction(type=NCActionType.DEPOSIT, token_index=0, amount=200)], + nc_method='mint', + nc_args=(self.tka.hash, 20000) + ) + + # Execute tx2 + self.artifacts.propagate_with(self.manager, up_to='b12') + assert self.b12.get_metadata().voided_by is None + assert self.tx2.get_metadata().first_block == self.b12.hash + assert self.tx2.get_metadata().voided_by is None + + # Check that the nano contract balance is updated. + assert self._get_all_balances() == { + self.htr_balance_key: Balance(value=1000, can_mint=False, can_melt=False), + self.tka_balance_key: Balance(value=21000, can_mint=True, can_melt=False), + } + + # Check the token index. + self._assert_token_index( + htr_total=self.initial_htr_total + 2 * self._settings.INITIAL_TOKENS_PER_BLOCK - 200, + tka_total=self.initial_tka_total + 20000, + ) + + def test_mint_tokens_and_partial_withdrawal_success(self) -> None: + # Grant a TKA mint authority to the nano contract and then use it to mint tokens. 
+ self.test_grant_authority_mint_success() + assert self._get_all_balances() == { + self.htr_balance_key: Balance(value=1000, can_mint=False, can_melt=False), + self.tka_balance_key: Balance(value=1000, can_mint=True, can_melt=False), + } + + # Add actions paying for HTR with the input and withdrawing part of the minted token from the contract. + self._change_tx_balance(tx=self.tx2, update_htr_output=-200, update_tka_output=10000) + self._set_nano_header( + tx=self.tx2, + nc_actions=[ + NanoHeaderAction(type=NCActionType.WITHDRAWAL, token_index=1, amount=10000), + NanoHeaderAction(type=NCActionType.DEPOSIT, token_index=0, amount=200), + ], + nc_method='mint', + nc_args=(self.tka.hash, 20000) + ) + + # Execute tx2 + self.artifacts.propagate_with(self.manager, up_to='b12') + assert self.b12.get_metadata().voided_by is None + assert self.tx2.get_metadata().first_block == self.b12.hash + assert self.tx2.get_metadata().voided_by is None + + # Check that the nano contract balance is updated. + assert self._get_all_balances() == { + self.htr_balance_key: Balance(value=1000, can_mint=False, can_melt=False), + self.tka_balance_key: Balance(value=11000, can_mint=True, can_melt=False), + } + + # Check the token index. + self._assert_token_index( + htr_total=self.initial_htr_total + 2 * self._settings.INITIAL_TOKENS_PER_BLOCK - 200, + tka_total=self.initial_tka_total + 20000, + ) + + def test_melt_tokens_success(self) -> None: + # Grant a TKA melt authority to the nano contract and then use it to melt tokens. + self.test_grant_authority_melt_success() + assert self._get_all_balances() == { + self.htr_balance_key: Balance(value=1000, can_mint=False, can_melt=False), + self.tka_balance_key: Balance(value=1000, can_mint=False, can_melt=True), + } + + # Add actions so both melted tokens and htr received from melt are from/in the tx inputs/outputs. 
+ self._change_tx_balance(tx=self.tx2, update_htr_output=5, update_tka_output=-500) + self._set_nano_header( + tx=self.tx2, + nc_actions=[ + NanoHeaderAction(type=NCActionType.DEPOSIT, token_index=1, amount=500), + NanoHeaderAction(type=NCActionType.WITHDRAWAL, token_index=0, amount=5), + ], + nc_method='melt', + nc_args=(self.tka.hash, 500) + ) + + # Execute tx2 + self.artifacts.propagate_with(self.manager, up_to='b12') + assert self.b12.get_metadata().voided_by is None + assert self.tx2.get_metadata().first_block == self.b12.hash + assert self.tx2.get_metadata().voided_by is None + + # Check that the nano contract balance is unchanged because both + # melted tokens and HTR received are from/in the tx inputs/outputs. + assert self._get_all_balances() == { + self.htr_balance_key: Balance(value=1000, can_mint=False, can_melt=False), + self.tka_balance_key: Balance(value=1000, can_mint=False, can_melt=True), + } + + # Check the token index. + self._assert_token_index( + htr_total=self.initial_htr_total + 2 * self._settings.INITIAL_TOKENS_PER_BLOCK + 5, + tka_total=self.initial_tka_total - 500, + ) + + def test_melt_tokens_from_contract_success(self) -> None: + # Grant a TKA melt authority to the nano contract and then use it to melt tokens. + self.test_grant_authority_melt_success() + assert self._get_all_balances() == { + self.htr_balance_key: Balance(value=1000, can_mint=False, can_melt=False), + self.tka_balance_key: Balance(value=1000, can_mint=False, can_melt=True), + } + + # Add a withdrawal action receiving the HTR from the melt in the output and melting the tokens in the contract. 
+ self._change_tx_balance(tx=self.tx2, update_htr_output=5) + self._set_nano_header( + tx=self.tx2, + nc_actions=[NanoHeaderAction(type=NCActionType.WITHDRAWAL, token_index=0, amount=5)], + nc_method='melt', + nc_args=(self.tka.hash, 500) + ) + + # Execute tx2 + self.artifacts.propagate_with(self.manager, up_to='b12') + assert self.b12.get_metadata().voided_by is None + assert self.tx2.get_metadata().first_block == self.b12.hash + assert self.tx2.get_metadata().voided_by is None + + # Check that the nano contract balance is updated. + assert self._get_all_balances() == { + self.htr_balance_key: Balance(value=1000, can_mint=False, can_melt=False), + self.tka_balance_key: Balance(value=500, can_mint=False, can_melt=True), + } + + # Check the token index. + self._assert_token_index( + htr_total=self.initial_htr_total + 2 * self._settings.INITIAL_TOKENS_PER_BLOCK + 5, + tka_total=self.initial_tka_total - 500, + ) + + def test_melt_tokens_from_contract_and_input_success(self) -> None: + # Grant a TKA melt authority to the nano contract and then use it to melt tokens. + self.test_grant_authority_melt_success() + assert self._get_all_balances() == { + self.htr_balance_key: Balance(value=1000, can_mint=False, can_melt=False), + self.tka_balance_key: Balance(value=1000, can_mint=False, can_melt=True), + } + + # Add actions so part of the tokens are melted from inputs and part from the contract. 
+ self._change_tx_balance(tx=self.tx2, update_htr_output=5, update_tka_output=-250) + self._set_nano_header( + tx=self.tx2, + nc_actions=[ + NanoHeaderAction(type=NCActionType.DEPOSIT, token_index=1, amount=250), + NanoHeaderAction(type=NCActionType.WITHDRAWAL, token_index=0, amount=5), + ], + nc_method='melt', + nc_args=(self.tka.hash, 500) + ) + + # Execute tx2 + self.artifacts.propagate_with(self.manager, up_to='b12') + assert self.b12.get_metadata().voided_by is None + assert self.tx2.get_metadata().first_block == self.b12.hash + assert self.tx2.get_metadata().voided_by is None + + # Check that the nano contract balance is updated. + assert self._get_all_balances() == { + self.htr_balance_key: Balance(value=1000, can_mint=False, can_melt=False), + self.tka_balance_key: Balance(value=750, can_mint=False, can_melt=True), + } + + # Check the token index. + self._assert_token_index( + htr_total=self.initial_htr_total + 2 * self._settings.INITIAL_TOKENS_PER_BLOCK + 5, + tka_total=self.initial_tka_total - 500, + ) + + def test_acquire_and_grant_same_token_not_allowed(self) -> None: + self._set_nano_header( + tx=self.tx1, + nc_actions=[ + NanoHeaderAction(type=NCActionType.ACQUIRE_AUTHORITY, token_index=1, amount=TxOutput.TOKEN_MINT_MASK), + NanoHeaderAction(type=NCActionType.GRANT_AUTHORITY, token_index=1, amount=TxOutput.TOKEN_MINT_MASK), + ], + ) + + with pytest.raises(NCInvalidAction) as e: + self.manager.verification_service.verifiers.nano_header.verify_actions(self.tx1) + assert str(e.value) == f'conflicting actions for token {self.tka.hash_hex}' + + def test_grant_and_acquire_same_token_not_allowed(self) -> None: + self._set_nano_header( + tx=self.tx1, + nc_actions=[ + NanoHeaderAction(type=NCActionType.GRANT_AUTHORITY, token_index=1, amount=TxOutput.TOKEN_MINT_MASK), + NanoHeaderAction(type=NCActionType.ACQUIRE_AUTHORITY, token_index=1, amount=TxOutput.TOKEN_MINT_MASK), + ], + ) + + with pytest.raises(NCInvalidAction) as e: + 
self.manager.verification_service.verifiers.nano_header.verify_actions(self.tx1) + assert str(e.value) == f'conflicting actions for token {self.tka.hash_hex}' + + def test_conflicting_actions(self) -> None: + # Add 2 conflicting actions for the same token. + self._set_nano_header(tx=self.tx1, nc_actions=[ + NanoHeaderAction(type=NCActionType.DEPOSIT, token_index=0, amount=1), + NanoHeaderAction(type=NCActionType.WITHDRAWAL, token_index=0, amount=2), + ]) + + with pytest.raises(NCInvalidAction) as e: + self.manager.verification_service.verifiers.nano_header.verify_actions(self.tx1) + assert str(e.value) == 'conflicting actions for token 00' + + def test_non_conflicting_actions_success(self) -> None: + # Add a GRANT_AUTHORITY action to mint TKA, and add a mint authority input accordingly. + # Also add a DEPOSIT action with the same token and update the tx output accordingly. + self._change_tx_balance(tx=self.tx1, add_inputs=[self._create_tka_mint_input()]) + self._change_tx_balance(tx=self.tx1, update_tka_output=-100) + self._set_nano_header( + tx=self.tx1, + nc_actions=[ + NanoHeaderAction(type=NCActionType.GRANT_AUTHORITY, token_index=1, amount=TxOutput.TOKEN_MINT_MASK), + NanoHeaderAction(type=NCActionType.DEPOSIT, token_index=1, amount=100), + ], + ) + + # Execute tx1 + self.artifacts.propagate_with(self.manager, up_to='b11') + assert self.b11.get_metadata().voided_by is None + assert self.tx1.get_metadata().voided_by is None + assert self.tx1.get_metadata().first_block == self.b11.hash + + # Check that the nano contract balance is updated with the mint authority. + assert self._get_all_balances() == { + self.htr_balance_key: Balance(value=1000, can_mint=False, can_melt=False), + self.tka_balance_key: Balance(value=1100, can_mint=True, can_melt=False), + } + + def test_token_index_not_found(self) -> None: + # Add an action with a token index out of bounds. 
+ self._set_nano_header(tx=self.tx1, nc_actions=[ + NanoHeaderAction(type=NCActionType.DEPOSIT, token_index=2, amount=1), + ]) + + with pytest.raises(NCInvalidAction) as e: + self.manager.verification_service.verify(self.tx1, self.verification_params) + assert str(e.value) == 'DEPOSIT token index 2 not found' + + def test_token_uid_not_in_list(self) -> None: + self._set_nano_header(tx=self.tx1, nc_actions=[ + NanoHeaderAction(type=NCActionType.DEPOSIT, token_index=0, amount=1), + ]) + + nano_header = self.tx1.get_nano_header() + actions = nano_header.get_actions() + + # Here I have to fake and patch get_actions() with an invalid + # one because the nano header always creates valid token uids. + fake_token_uid = b'\1' * 32 + fake_actions = [dataclasses.replace(actions[0], token_uid=TokenUid(fake_token_uid))] + + with patch('hathor.transaction.headers.NanoHeader.get_actions', lambda _: fake_actions): + with pytest.raises(NCInvalidAction) as e: + self.manager.verification_service.verifiers.nano_header.verify_actions(self.tx1) + assert str(e.value) == f'DEPOSIT action requires token {fake_token_uid.hex()} in tokens list' + + def _test_invalid_unknown_authority(self, action_type: NCActionType) -> None: + # Create an authority action with an unknown authority. + self._set_nano_header(tx=self.tx1, nc_actions=[ + NanoHeaderAction(type=action_type, token_index=1, amount=TxOutput.ALL_AUTHORITIES + 1), + ]) + + with pytest.raises(NCInvalidAction) as e: + self.manager.verification_service.verify(self.tx1, self.verification_params) + assert str(e.value) == f'action {action_type.name} token {self.tka.hash_hex} invalid authorities: 0b100' + + def _test_invalid_htr_authority(self, action_type: NCActionType) -> None: + # Create an authority action for HTR. 
+ self._set_nano_header(tx=self.tx1, nc_actions=[ + NanoHeaderAction(type=action_type, token_index=0, amount=TxOutput.TOKEN_MINT_MASK), + ]) + + with pytest.raises(NCInvalidAction) as e: + self.manager.verification_service.verify(self.tx1, self.verification_params) + assert str(e.value) == f'{action_type.name} action cannot be executed on HTR token' + + def test_invalid_grant_unknown_authority(self) -> None: + self._test_invalid_unknown_authority(NCActionType.GRANT_AUTHORITY) + + def test_invalid_acquire_unknown_authority(self) -> None: + self._test_invalid_unknown_authority(NCActionType.ACQUIRE_AUTHORITY) + + def test_invalid_grant_htr_authority(self) -> None: + self._test_invalid_htr_authority(NCActionType.GRANT_AUTHORITY) + + def test_invalid_acquire_htr_authority(self) -> None: + self._test_invalid_htr_authority(NCActionType.ACQUIRE_AUTHORITY) + + def test_grant_authority_cannot_mint(self) -> None: + # Try to grant a TKA mint authority without an authority input. + self._set_nano_header(tx=self.tx1, nc_actions=[ + NanoHeaderAction( + type=NCActionType.GRANT_AUTHORITY, + token_index=1, + amount=TxOutput.TOKEN_MINT_MASK + ), + ]) + + with pytest.raises(NCInvalidAction) as e: + self.manager.verification_service.verify(self.tx1, self.verification_params) + assert str(e.value) == f'GRANT_AUTHORITY token {self.tka.hash_hex} requires mint, but no input has it' + + def test_grant_authority_cannot_melt(self) -> None: + # Try to grant a TKA melt authority without an authority input. 
+ self._set_nano_header(tx=self.tx1, nc_actions=[ + NanoHeaderAction( + type=NCActionType.GRANT_AUTHORITY, + token_index=1, + amount=TxOutput.TOKEN_MELT_MASK + ), + ]) + + with pytest.raises(NCInvalidAction) as e: + self.manager.verification_service.verify(self.tx1, self.verification_params) + assert str(e.value) == f'GRANT_AUTHORITY token {self.tka.hash_hex} requires melt, but no input has it' + + def test_acquire_authority_cannot_mint_with_melt(self) -> None: + # Try to create a mint authority output with an action to acquire a melt authority. + self._change_tx_balance( + tx=self.tx1, + add_outputs=[ + TxOutput(value=TxOutput.TOKEN_MINT_MASK, script=b'', token_data=TxOutput.TOKEN_AUTHORITY_MASK | 1) + ] + ) + self._set_nano_header(tx=self.tx1, nc_actions=[ + NanoHeaderAction( + type=NCActionType.ACQUIRE_AUTHORITY, token_index=1, amount=TxOutput.TOKEN_MELT_MASK + ), + ]) + + with pytest.raises(InvalidToken, match='output at index 2 has mint authority, but no input has it'): + self.manager.verification_service.verify(self.tx1, self.verification_params) + + def test_use_authority_cannot_melt_with_mint(self) -> None: + # Try to create a melt authority output with an action to acquire a mint authority. + self._change_tx_balance( + tx=self.tx1, + add_outputs=[ + TxOutput(value=TxOutput.TOKEN_MELT_MASK, script=b'', token_data=TxOutput.TOKEN_AUTHORITY_MASK | 1) + ] + ) + self._set_nano_header(tx=self.tx1, nc_actions=[ + NanoHeaderAction( + type=NCActionType.ACQUIRE_AUTHORITY, token_index=1, amount=TxOutput.TOKEN_MINT_MASK + ), + ]) + + with pytest.raises(InvalidToken, match='output at index 2 has melt authority, but no input has it'): + self.manager.verification_service.verify(self.tx1, self.verification_params) + + def test_actions_max_len_fail(self) -> None: + # Try to create too many actions. 
+ action = NanoHeaderAction(type=NCActionType.ACQUIRE_AUTHORITY, token_index=1, amount=1) + actions = [action] * (MAX_ACTIONS_LEN + 1) + + self._set_nano_header(tx=self.tx1, nc_actions=actions) + + with pytest.raises(NCInvalidAction, match='more actions than the max allowed: 17 > 16'): + self.manager.verification_service.verify(self.tx1, self.verification_params) diff --git a/tests/nanocontracts/test_all_fields.py b/tests/nanocontracts/test_all_fields.py new file mode 100644 index 000000000..d7eebb59f --- /dev/null +++ b/tests/nanocontracts/test_all_fields.py @@ -0,0 +1,163 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import re + +from hathor.nanocontracts import OnChainBlueprint +from hathor.nanocontracts.blueprint import Blueprint +from hathor.nanocontracts.context import Context +from hathor.nanocontracts.exception import BlueprintSyntaxError +from hathor.nanocontracts.types import BlueprintId, VertexId, public +from hathor.transaction import Block, Transaction +from hathor.transaction.nc_execution_state import NCExecutionState +from tests import unittest +from tests.dag_builder.builder import TestDAGBuilder +from tests.nanocontracts.test_blueprints.all_fields import AllFieldsBlueprint + + +class TestAllFields(unittest.TestCase): + def test_all_fields_builtin(self) -> None: + manager = self.create_peer('unittests') + blueprint_id = BlueprintId(VertexId(b'\x01' * 32)) + manager.tx_storage.nc_catalog.blueprints[blueprint_id] = AllFieldsBlueprint + + dag_builder = TestDAGBuilder.from_manager(manager) + artifacts = dag_builder.build_from_str(f''' + blockchain genesis b[1..11] + b10 < dummy + + nc1.nc_id = "{blueprint_id.hex()}" + nc1.nc_method = initialize() + nc1 <-- b11 + ''') + artifacts.propagate_with(manager) + + b11 = artifacts.get_typed_vertex('b11', Block) + nc1 = artifacts.get_typed_vertex('nc1', Transaction) + + assert b11.get_metadata().voided_by is None + + assert nc1.get_metadata().voided_by is None + assert nc1.get_metadata().first_block == b11.hash + assert nc1.get_metadata().nc_execution is NCExecutionState.SUCCESS + + def test_all_fields_ocb(self) -> None: + private_key = unittest.OCB_TEST_PRIVKEY.hex() + password = unittest.OCB_TEST_PASSWORD.hex() + manager = self.create_peer('unittests') + dag_builder = TestDAGBuilder.from_manager(manager) + artifacts = dag_builder.build_from_str(f''' + blockchain genesis b[1..12] + b10 < dummy + + ocb1.ocb_private_key = "{private_key}" + ocb1.ocb_password = "{password}" + ocb1.ocb_code = all_fields.py, AllFieldsBlueprint + ocb1 <-- b11 + + nc1.nc_id = ocb1 + nc1.nc_method = initialize() + nc1 <-- b12 + ''') + 
artifacts.propagate_with(manager) + + b11, b12 = artifacts.get_typed_vertices(['b11', 'b12'], Block) + ocb1 = artifacts.get_typed_vertex('ocb1', OnChainBlueprint) + nc1 = artifacts.get_typed_vertex('nc1', Transaction) + + assert b11.get_metadata().voided_by is None + + assert ocb1.get_metadata().voided_by is None + assert ocb1.get_metadata().first_block == b11.hash + + assert nc1.get_metadata().voided_by is None + assert nc1.get_metadata().first_block == b12.hash + assert nc1.get_metadata().nc_execution is NCExecutionState.SUCCESS + + def test_no_named_tuple_type(self) -> None: + from typing import NamedTuple + + with self.assertRaises(BlueprintSyntaxError) as cm: + class MyInvalidBlueprint(Blueprint): + invalid_attribute: NamedTuple + + @public + def initialize(self, ctx: Context) -> None: + pass + + assert cm.exception.args[0] == 'unsupported field type: `invalid_attribute: NamedTuple`' + context_exception = cm.exception.__context__ + assert isinstance(context_exception, TypeError) + assert re.match( + r'type is not supported by any Field class', + context_exception.args[0] + ) + + def test_no_bytearray(self) -> None: + with self.assertRaises(BlueprintSyntaxError) as cm: + class MyInvalidBlueprint(Blueprint): + invalid_attribute: bytearray + + @public + def initialize(self, ctx: Context) -> None: + pass + + assert cm.exception.args[0] == 'unsupported field type: `invalid_attribute: bytearray`' + context_exception = cm.exception.__context__ + assert isinstance(context_exception, TypeError) + assert context_exception.args[0] == r"type is not supported by any Field class" + + def test_no_typing_union(self) -> None: + from typing import Union + + with self.assertRaises(BlueprintSyntaxError) as cm: + class MyInvalidBlueprint(Blueprint): + invalid_attribute: Union[str, int] + + @public + def initialize(self, ctx: Context) -> None: + pass + + assert cm.exception.args[0] == 'unsupported field type: `invalid_attribute: typing.Union[str, int]`' + context_exception = 
cm.exception.__context__ + assert isinstance(context_exception, TypeError) + assert context_exception.args[0] == r"type typing.Union[str, int] is not supported by any Field class" + + def test_no_union_type(self) -> None: + with self.assertRaises(BlueprintSyntaxError) as cm: + class MyInvalidBlueprint(Blueprint): + invalid_attribute: str | int + + @public + def initialize(self, ctx: Context) -> None: + pass + + assert cm.exception.args[0] == 'unsupported field type: `invalid_attribute: str | int`' + context_exception = cm.exception.__context__ + assert isinstance(context_exception, TypeError) + assert context_exception.args[0] == r"type str | int is not supported by any Field class" + + def test_no_none(self) -> None: + with self.assertRaises(BlueprintSyntaxError) as cm: + class MyInvalidBlueprint(Blueprint): + invalid_attribute: None + + @public + def initialize(self, ctx: Context) -> None: + pass + + assert cm.exception.args[0] == 'unsupported field type: `invalid_attribute: None`' + context_exception = cm.exception.__context__ + assert isinstance(context_exception, TypeError) + assert context_exception.args[0] == r"type None is not supported by any Field class" diff --git a/tests/nanocontracts/test_allowed_actions.py b/tests/nanocontracts/test_allowed_actions.py new file mode 100644 index 000000000..0841bd2bb --- /dev/null +++ b/tests/nanocontracts/test_allowed_actions.py @@ -0,0 +1,150 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import re + +import pytest + +from hathor.nanocontracts import Blueprint, Context, public +from hathor.nanocontracts.exception import BlueprintSyntaxError, NCForbiddenAction +from hathor.nanocontracts.types import ( + NCAcquireAuthorityAction, + NCAction, + NCActionType, + NCArgs, + NCDepositAction, + NCGrantAuthorityAction, + NCWithdrawalAction, + fallback, +) +from tests.nanocontracts.blueprints.unittest import BlueprintTestCase + + +class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @public + def nop(self, ctx: Context) -> None: + pass + + @public(allow_deposit=True) + def deposit(self, ctx: Context) -> None: + pass + + @public(allow_withdrawal=True) + def withdrawal(self, ctx: Context) -> None: + pass + + @public(allow_grant_authority=True) + def grant_authority(self, ctx: Context) -> None: + pass + + @public(allow_acquire_authority=True) + def acquire_authority(self, ctx: Context) -> None: + pass + + @fallback + def fallback(self, ctx: Context, method_name: str, nc_args: NCArgs) -> None: + pass + + +class TestAllowedActions(BlueprintTestCase): + def setUp(self) -> None: + super().setUp() + + self.blueprint_id = self._register_blueprint_class(MyBlueprint) + self.contract_id = self.gen_random_contract_id() + + self.token_a = self.gen_random_token_uid() + self.address = self.gen_random_address() + self.tx = self.get_genesis_tx() + + self.all_actions: set[NCAction] = { + NCDepositAction(token_uid=self.token_a, amount=123), + NCWithdrawalAction(token_uid=self.token_a, amount=123), + NCGrantAuthorityAction(token_uid=self.token_a, mint=True, melt=True), + NCAcquireAuthorityAction(token_uid=self.token_a, mint=True, melt=True), + } + + all_actions_types = [action.type for action in self.all_actions] + for action_type in NCActionType: + # To make sure we remember to test new action types when we implement them + assert action_type in all_actions_types, f'missing {action_type.name}' + + def _get_context(self, *actions: 
NCAction) -> Context: + return Context( + actions=list(actions), + vertex=self.tx, + address=self.address, + timestamp=self.now, + ) + + def test_no_actions_allowed(self) -> None: + self.runner.create_contract(self.contract_id, self.blueprint_id, self._get_context()) + for action in self.all_actions: + ctx = self._get_context(action) + + # Test on public method + with pytest.raises(NCForbiddenAction, match=f'action {action.name} is forbidden on method `nop`'): + self.runner.call_public_method(self.contract_id, 'nop', ctx) + + # Test on fallback method + with pytest.raises(NCForbiddenAction, match=f'action {action.name} is forbidden on method `fallback`'): + self.runner.call_public_method(self.contract_id, 'unknown', ctx) + + def test_conflicting_params(self) -> None: + msg = 'use only one of `allow_actions` or per-action flags: `initialize()`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class InvalidBlueprint(Blueprint): + @public(allow_deposit=True, allow_actions=[NCActionType.DEPOSIT]) + def initialize(self, ctx: Context) -> None: + pass + + def test_allow_specific_action_on_public(self) -> None: + for allowed_action in self.all_actions: + runner = self.build_runner() + runner.create_contract(self.contract_id, self.blueprint_id, self._get_context()) + method_name = allowed_action.name.lower() + forbidden_actions = self.all_actions.difference({allowed_action}) + + for forbidden_action in forbidden_actions: + msg = f'action {forbidden_action.name} is forbidden on method `{method_name}`' + ctx = self._get_context(forbidden_action) + with pytest.raises(NCForbiddenAction, match=msg): + runner.call_public_method(self.contract_id, method_name, ctx) + + def test_allow_specific_action_on_fallback(self) -> None: + for allowed_action in self.all_actions: + class MyOtherBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @fallback(allow_actions=[allowed_action.type]) + def fallback(self, ctx: Context, method_name: 
str, nc_args: NCArgs) -> None: + pass + + runner = self.build_runner() + blueprint_id = self._register_blueprint_class(MyOtherBlueprint) + runner.create_contract(self.contract_id, blueprint_id, self._get_context()) + method_name = allowed_action.name.lower() + forbidden_actions = self.all_actions.difference({allowed_action}) + + for forbidden_action in forbidden_actions: + msg = f'action {forbidden_action.name} is forbidden on method `fallback`' + ctx = self._get_context(forbidden_action) + with pytest.raises(NCForbiddenAction, match=msg): + runner.call_public_method(self.contract_id, method_name, ctx) diff --git a/tests/nanocontracts/test_authorities_call_another.py b/tests/nanocontracts/test_authorities_call_another.py new file mode 100644 index 000000000..e4bec4867 --- /dev/null +++ b/tests/nanocontracts/test_authorities_call_another.py @@ -0,0 +1,304 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import pytest + +from hathor.nanocontracts import Blueprint, Context, public +from hathor.nanocontracts.exception import NCInvalidAction +from hathor.nanocontracts.storage.contract_storage import Balance +from hathor.nanocontracts.types import ContractId, NCAcquireAuthorityAction, NCAction, NCGrantAuthorityAction, TokenUid +from tests.nanocontracts.blueprints.unittest import BlueprintTestCase + + +class CalleeBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @public(allow_grant_authority=True, allow_acquire_authority=True) + def nop(self, ctx: Context) -> None: + pass + + @public + def revoke_from_self(self, ctx: Context, token_uid: TokenUid, mint: bool, melt: bool) -> None: + self.syscall.revoke_authorities(token_uid, revoke_mint=mint, revoke_melt=melt) + + @public + def grant_all_to_other(self, ctx: Context, contract_id: ContractId, token_uid: TokenUid) -> None: + action = NCGrantAuthorityAction(token_uid=token_uid, mint=True, melt=True) + self.syscall.call_public_method(contract_id, 'nop', [action]) + + @public + def revoke_all_from_other(self, ctx: Context, contract_id: ContractId, token_uid: TokenUid) -> None: + self.syscall.call_public_method(contract_id, 'revoke_from_self', [], token_uid, True, True) + + +class CallerBlueprint(Blueprint): + other_id: ContractId + + @public(allow_grant_authority=True) + def initialize(self, ctx: Context, other_id: ContractId) -> None: + self.other_id = other_id + + @public(allow_grant_authority=True) + def nop(self, ctx: Context) -> None: + pass + + @public + def grant_to_other(self, ctx: Context, token_uid: TokenUid, mint: bool, melt: bool) -> None: + action = NCGrantAuthorityAction(token_uid=token_uid, mint=mint, melt=melt) + self.syscall.call_public_method(self.other_id, 'nop', [action]) + + @public(allow_grant_authority=True) + def revoke_from_self(self, ctx: Context, token_uid: TokenUid, mint: bool, melt: bool) -> None: + self.syscall.revoke_authorities(token_uid, 
revoke_mint=mint, revoke_melt=melt) + + @public + def revoke_from_other(self, ctx: Context, token_uid: TokenUid, mint: bool, melt: bool) -> None: + self.syscall.call_public_method(self.other_id, 'revoke_from_self', [], token_uid, mint, melt) + + @public + def acquire_another(self, ctx: Context, token_uid: TokenUid, mint: bool, melt: bool) -> None: + action = NCAcquireAuthorityAction(token_uid=token_uid, mint=mint, melt=melt) + self.syscall.call_public_method(self.other_id, 'nop', [action]) + + @public + def call_grant_all_to_other_then_revoke(self, ctx: Context, token_uid: TokenUid) -> None: + self.syscall.revoke_authorities(token_uid, revoke_mint=True, revoke_melt=True) + assert not self.syscall.can_mint(token_uid) + assert not self.syscall.can_melt(token_uid) + self.syscall.call_public_method( + self.other_id, + 'grant_all_to_other', + actions=[], + contract_id=self.syscall.get_contract_id(), + token_uid=token_uid, + ) + assert self.syscall.can_mint(token_uid) + assert self.syscall.can_melt(token_uid) + self.syscall.revoke_authorities(token_uid, revoke_mint=True, revoke_melt=True) + assert not self.syscall.can_mint(token_uid) + assert not self.syscall.can_melt(token_uid) + + @public(allow_grant_authority=True) + def call_revoke_all_from_other(self, ctx: Context, token_uid: TokenUid) -> None: + assert self.syscall.can_mint(token_uid) + assert self.syscall.can_melt(token_uid) + self.syscall.call_public_method( + self.other_id, + 'revoke_all_from_other', + actions=[], + contract_id=self.syscall.get_contract_id(), + token_uid=token_uid, + ) + assert not self.syscall.can_mint_before_current_call(token_uid) + assert not self.syscall.can_melt_before_current_call(token_uid) + + +class TestAuthoritiesCallAnother(BlueprintTestCase): + def setUp(self) -> None: + super().setUp() + + self.callee_blueprint_id = self.gen_random_blueprint_id() + self.caller_blueprint_id = self.gen_random_blueprint_id() + + self.nc_catalog.blueprints[self.callee_blueprint_id] = CalleeBlueprint + 
self.nc_catalog.blueprints[self.caller_blueprint_id] = CallerBlueprint + + self.callee_id = self.gen_random_contract_id() + self.caller_id = self.gen_random_contract_id() + + self.token_a = self.gen_random_token_uid() + self.address = self.gen_random_address() + self.tx = self.get_genesis_tx() + + def _initialize(self, caller_actions: list[NCAction] | None = None) -> None: + caller_ctx = Context( + actions=caller_actions or [], + vertex=self.tx, + address=self.address, + timestamp=self.now + ) + callee_ctx = Context( + actions=[], + vertex=self.tx, + address=self.address, + timestamp=self.now + ) + self.runner.create_contract(self.caller_id, self.caller_blueprint_id, caller_ctx, other_id=self.callee_id) + self.runner.create_contract(self.callee_id, self.callee_blueprint_id, callee_ctx) + self.caller_storage = self.runner.get_storage(self.caller_id) + self.callee_storage = self.runner.get_storage(self.callee_id) + + def _grant_to_other(self, *, mint: bool, melt: bool) -> None: + context = Context( + actions=[], + vertex=self.tx, + address=self.address, + timestamp=self.now + ) + self.runner.call_public_method( + self.caller_id, 'grant_to_other', context, token_uid=self.token_a, mint=mint, melt=melt + ) + + def _revoke_from_self(self, contract_id: ContractId, *, actions: list[NCAction], mint: bool, melt: bool) -> None: + context = Context( + actions=actions, + vertex=self.tx, + address=self.address, + timestamp=self.now + ) + self.runner.call_public_method( + contract_id, 'revoke_from_self', context, token_uid=self.token_a, mint=mint, melt=melt + ) + + def _revoke_from_other(self, *, mint: bool, melt: bool) -> None: + context = Context( + actions=[], + vertex=self.tx, + address=self.address, + timestamp=self.now + ) + self.runner.call_public_method( + self.caller_id, 'revoke_from_other', context, token_uid=self.token_a, mint=mint, melt=melt + ) + + def test_grant_mint_success(self) -> None: + 
self._initialize(caller_actions=[NCGrantAuthorityAction(token_uid=self.token_a, mint=True, melt=False)]) + assert self.callee_storage.get_balance(self.token_a) == Balance(value=0, can_mint=False, can_melt=False) + self._grant_to_other(mint=True, melt=False) + assert self.callee_storage.get_balance(self.token_a) == Balance(value=0, can_mint=True, can_melt=False) + + def test_revoke_mint_success(self) -> None: + self.test_grant_mint_success() + assert self.callee_storage.get_balance(self.token_a) == Balance(value=0, can_mint=True, can_melt=False) + self._revoke_from_other(mint=True, melt=False) + assert self.callee_storage.get_balance(self.token_a) == Balance(value=0, can_mint=False, can_melt=False) + + def test_grant_mint_fail(self) -> None: + self._initialize() + assert self.callee_storage.get_balance(self.token_a) == Balance(value=0, can_mint=False, can_melt=False) + msg = f'GRANT_AUTHORITY token {self.token_a.hex()} requires mint, but contract does not have that authority' + with pytest.raises(NCInvalidAction, match=msg): + self._grant_to_other(mint=True, melt=False) + assert self.callee_storage.get_balance(self.token_a) == Balance(value=0, can_mint=False, can_melt=False) + + def test_grant_melt_success(self) -> None: + self._initialize(caller_actions=[NCGrantAuthorityAction(token_uid=self.token_a, mint=False, melt=True)]) + assert self.callee_storage.get_balance(self.token_a) == Balance(value=0, can_mint=False, can_melt=False) + self._grant_to_other(mint=False, melt=True) + assert self.callee_storage.get_balance(self.token_a) == Balance(value=0, can_mint=False, can_melt=True) + + def test_revoke_melt_success(self) -> None: + self.test_grant_melt_success() + assert self.callee_storage.get_balance(self.token_a) == Balance(value=0, can_mint=False, can_melt=True) + self._revoke_from_other(mint=False, melt=True) + assert self.callee_storage.get_balance(self.token_a) == Balance(value=0, can_mint=False, can_melt=False) + + def test_grant_melt_fail(self) -> None: + 
self._initialize() + assert self.callee_storage.get_balance(self.token_a) == Balance(value=0, can_mint=False, can_melt=False) + msg = f'GRANT_AUTHORITY token {self.token_a.hex()} requires melt, but contract does not have that authority' + with pytest.raises(NCInvalidAction, match=msg): + self._grant_to_other(mint=False, melt=True) + assert self.callee_storage.get_balance(self.token_a) == Balance(value=0, can_mint=False, can_melt=False) + + def test_acquire_mint(self) -> None: + self._initialize() + context = Context( + actions=[NCGrantAuthorityAction(token_uid=self.token_a, mint=True, melt=False)], + vertex=self.tx, + address=self.address, + timestamp=self.now + ) + self.runner.call_public_method(self.callee_id, 'nop', context) + assert self.callee_storage.get_balance(self.token_a) == Balance(value=0, can_mint=True, can_melt=False) + assert self.caller_storage.get_balance(self.token_a) == Balance(value=0, can_mint=False, can_melt=False) + + context = Context( + actions=[], + vertex=self.tx, + address=self.address, + timestamp=self.now + ) + self.runner.call_public_method( + self.caller_id, 'acquire_another', context, token_uid=self.token_a, mint=True, melt=False + ) + + assert self.callee_storage.get_balance(self.token_a) == Balance(value=0, can_mint=True, can_melt=False) + assert self.caller_storage.get_balance(self.token_a) == Balance(value=0, can_mint=True, can_melt=False) + + def test_acquire_melt(self) -> None: + self._initialize() + context = Context( + actions=[NCGrantAuthorityAction(token_uid=self.token_a, mint=False, melt=True)], + vertex=self.tx, + address=self.address, + timestamp=self.now + ) + self.runner.call_public_method(self.callee_id, 'nop', context) + assert self.callee_storage.get_balance(self.token_a) == Balance(value=0, can_mint=False, can_melt=True) + assert self.caller_storage.get_balance(self.token_a) == Balance(value=0, can_mint=False, can_melt=False) + + context = Context( + actions=[], + vertex=self.tx, + address=self.address, + 
timestamp=self.now + ) + self.runner.call_public_method( + self.caller_id, 'acquire_another', context, token_uid=self.token_a, mint=False, melt=True + ) + + assert self.callee_storage.get_balance(self.token_a) == Balance(value=0, can_mint=False, can_melt=True) + assert self.caller_storage.get_balance(self.token_a) == Balance(value=0, can_mint=False, can_melt=True) + + def test_grant_and_revoke_single_contract(self) -> None: + self._initialize() + assert self.caller_storage.get_balance(self.token_a) == Balance(value=0, can_mint=False, can_melt=False) + self._revoke_from_self( + self.caller_id, + actions=[NCGrantAuthorityAction(token_uid=self.token_a, mint=True, melt=True)], + mint=True, + melt=True, + ) + # actions run before the method, so the final result is revoked. + assert self.caller_storage.get_balance(self.token_a) == Balance(value=0, can_mint=False, can_melt=False) + + def test_revoke_then_grant_same_call_another_contract(self) -> None: + self._initialize(caller_actions=[NCGrantAuthorityAction(token_uid=self.token_a, mint=True, melt=True)]) + self._grant_to_other(mint=True, melt=True) + assert self.caller_storage.get_balance(self.token_a) == Balance(value=0, can_mint=True, can_melt=True) + assert self.callee_storage.get_balance(self.token_a) == Balance(value=0, can_mint=True, can_melt=True) + context = Context( + actions=[], + vertex=self.tx, + address=self.address, + timestamp=self.now + ) + self.runner.call_public_method(self.caller_id, 'call_grant_all_to_other_then_revoke', context, self.token_a) + # the main call calls the revoke syscall last, so the final result is revoked. 
+ assert self.caller_storage.get_balance(self.token_a) == Balance(value=0, can_mint=False, can_melt=False) + + def test_grant_then_revoke_same_call_another_contract(self) -> None: + self._initialize() + context = Context( + actions=[NCGrantAuthorityAction(token_uid=self.token_a, mint=True, melt=True)], + vertex=self.tx, + address=self.address, + timestamp=self.now + ) + self.runner.call_public_method(self.caller_id, 'call_revoke_all_from_other', context, self.token_a) + # actions run before the method, so the final result is revoked. + assert self.caller_storage.get_balance(self.token_a) == Balance(value=0, can_mint=False, can_melt=False) diff --git a/tests/nanocontracts/test_authorities_index.py b/tests/nanocontracts/test_authorities_index.py new file mode 100644 index 000000000..c9504c03e --- /dev/null +++ b/tests/nanocontracts/test_authorities_index.py @@ -0,0 +1,325 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import pytest + +from hathor.nanocontracts import Blueprint, Context, public +from hathor.nanocontracts.exception import NanoContractDoesNotExist +from hathor.nanocontracts.storage.contract_storage import Balance +from hathor.nanocontracts.types import ContractId, NCAcquireAuthorityAction, NCActionType, TokenUid, VertexId +from hathor.nanocontracts.utils import derive_child_token_id +from hathor.transaction import Block, Transaction, TxOutput +from hathor.transaction.headers.nano_header import NanoHeaderAction +from hathor.transaction.nc_execution_state import NCExecutionState +from hathor.wallet import HDWallet +from tests.dag_builder.builder import TestDAGBuilder +from tests.nanocontracts.blueprints.unittest import BlueprintTestCase +from tests.nanocontracts.utils import set_nano_header + + +class MyBlueprint(Blueprint): + token_uid: TokenUid | None + + @public(allow_grant_authority=True) + def initialize(self, ctx: Context) -> None: + self.token_uid = None + + @public + def revoke_all(self, ctx: Context, token_uid: TokenUid | None) -> None: + if token_uid is None: + assert self.token_uid is not None + token_uid = self.token_uid + self.syscall.revoke_authorities(token_uid, revoke_mint=True, revoke_melt=True) + + @public(allow_deposit=True) + def create_token(self, ctx: Context) -> None: + self.token_uid = self.syscall.create_token(token_name='token a', token_symbol='TKA', amount=1000) + + @public(allow_acquire_authority=True) + def allow_acquire_authority(self, ctx: Context) -> None: + pass + + @public + def acquire_authority(self, ctx: Context, other_id: ContractId) -> None: + self.token_uid = derive_child_token_id(other_id, 'TKA') + actions = [NCAcquireAuthorityAction(token_uid=self.token_uid, mint=True, melt=True)] + self.syscall.call_public_method(other_id, 'allow_acquire_authority', actions) + + +class TestAuthoritiesIndex(BlueprintTestCase): + def setUp(self) -> None: + super().setUp() + + self.tokens_index = self.manager.tx_storage.indexes.tokens + 
self.dag_builder = TestDAGBuilder.from_manager(self.manager) + self.blueprint_id = self._register_blueprint_class(MyBlueprint) + + wallet = self.dag_builder.get_main_wallet() + assert isinstance(wallet, HDWallet) + self.wallet = wallet + + def test_grant_action_then_revoke(self) -> None: + artifacts = self.dag_builder.build_from_str(''' + blockchain genesis b[1..12] + b10 < dummy < TKA + + tx1.out[0] = 1000 TKA # To force TKA to be a token creation tx + + TKA <-- b11 + tx1 <-- b12 + ''') + artifacts.propagate_with(self.manager, up_to='dummy') + tka, tx1 = artifacts.get_typed_vertices(['TKA', 'tx1'], Transaction) + + # Remove authority outputs so no UTXOs have them + assert tka.outputs[-1].is_token_authority() + assert tka.outputs[-2].is_token_authority() + tka.outputs = tka.outputs[:-2] + # HACK: We don't clear the sighash cache on purpose so we don't need to re-sign the tx + # tka.clear_sighash_cache() + + # Add GRANT action to TKA + set_nano_header( + tx=tka, + wallet=self.wallet, + nc_id=self.blueprint_id, + nc_actions=[ + NanoHeaderAction(type=NCActionType.GRANT_AUTHORITY, token_index=1, amount=TxOutput.ALL_AUTHORITIES) + ], + nc_method='initialize', + blueprint=MyBlueprint, + seqnum=0, + ) + + # Before executing TKA, nobody can mint or melt + artifacts.propagate_with(self.manager, up_to='TKA') + token_info = self.tokens_index.get_token_info(tka.hash) + assert list(token_info.iter_mint_utxos()) == [] + assert list(token_info.iter_melt_utxos()) == [] + assert not token_info.can_mint() + assert not token_info.can_melt() + + # After b11, TKA is executed and holds authorities + artifacts.propagate_with(self.manager, up_to='b11') + assert tka.get_metadata().nc_execution is NCExecutionState.SUCCESS + + storage = self.manager.get_best_block_nc_storage(tka.hash) + assert storage.get_balance(tka.hash) == Balance(value=0, can_mint=True, can_melt=True) + + token_info = self.tokens_index.get_token_info(tka.hash) + assert list(token_info.iter_mint_utxos()) == [] + assert 
list(token_info.iter_melt_utxos()) == [] + assert token_info.can_mint() + assert token_info.can_melt() + + # Even though I'm not setting authority actions here, I have to set the header manually instead of using the + # DAG builder because it doesn't know TKA is a NC. + set_nano_header( + tx=tx1, + wallet=self.wallet, + nc_id=tka.hash, + nc_method='revoke_all', + nc_args=(tka.hash,), + blueprint=MyBlueprint, + seqnum=1, + ) + + # After b12, all authorities are revoked + artifacts.propagate_with(self.manager, up_to='b12') + assert tx1.get_metadata().nc_execution is NCExecutionState.SUCCESS + + storage = self.manager.get_best_block_nc_storage(tka.hash) + assert storage.get_balance(tka.hash) == Balance(value=0, can_mint=False, can_melt=False) + + token_info = self.tokens_index.get_token_info(tka.hash) + assert list(token_info.iter_mint_utxos()) == [] + assert list(token_info.iter_melt_utxos()) == [] + assert not token_info.can_mint() + assert not token_info.can_melt() + + def test_grant_action_then_reorg(self) -> None: + artifacts = self.dag_builder.build_from_str(''' + blockchain genesis b[1..11] + blockchain b10 a[11..12] + b10 < dummy < TKA + a12.weight = 3 # Necessary to force the reorg + + tx1.out[0] = 1000 TKA # To force TKA to be a token creation tx + + TKA <-- b11 + b11 < a11 + ''') + artifacts.propagate_with(self.manager, up_to='dummy') + b11, a11 = artifacts.get_typed_vertices(['b11', 'a11'], Block) + tka = artifacts.get_typed_vertex('TKA', Transaction) + + # Remove authority outputs so no UTXOs have them + assert tka.outputs[-1].is_token_authority() + assert tka.outputs[-2].is_token_authority() + tka.outputs = tka.outputs[:-2] + # HACK: We don't clear the sighash cache on purpose so we don't need to re-sign the tx + # tka.clear_sighash_cache() + + # Add GRANT action to TKA + set_nano_header( + tx=tka, + wallet=self.wallet, + nc_id=self.blueprint_id, + nc_actions=[ + NanoHeaderAction(type=NCActionType.GRANT_AUTHORITY, token_index=1, 
amount=TxOutput.ALL_AUTHORITIES) + ], + nc_method='initialize', + blueprint=MyBlueprint, + seqnum=0, + ) + + # Before executing TKA, nobody can mint or melt + artifacts.propagate_with(self.manager, up_to='TKA') + token_info = self.tokens_index.get_token_info(tka.hash) + assert list(token_info.iter_mint_utxos()) == [] + assert list(token_info.iter_melt_utxos()) == [] + assert not token_info.can_mint() + assert not token_info.can_melt() + + # After b11, TKA is executed and holds authorities + artifacts.propagate_with(self.manager, up_to='b11') + assert b11.get_metadata().voided_by is None + assert tka.get_metadata().first_block == b11.hash + assert tka.get_metadata().nc_execution is NCExecutionState.SUCCESS + + storage = self.manager.get_best_block_nc_storage(tka.hash) + assert storage.get_balance(tka.hash) == Balance(value=0, can_mint=True, can_melt=True) + + token_info = self.tokens_index.get_token_info(tka.hash) + assert list(token_info.iter_mint_utxos()) == [] + assert list(token_info.iter_melt_utxos()) == [] + assert token_info.can_mint() + assert token_info.can_melt() + + # After a12, a reorg happens un-executing TKA + artifacts.propagate_with(self.manager, up_to='a12') + assert b11.get_metadata().voided_by == {b11.hash} + assert a11.get_metadata().voided_by is None + assert tka.get_metadata().first_block is None + assert tka.get_metadata().nc_execution is NCExecutionState.PENDING + + with pytest.raises(NanoContractDoesNotExist): + self.manager.get_best_block_nc_storage(tka.hash) + + token_info = self.tokens_index.get_token_info(tka.hash) + assert list(token_info.iter_mint_utxos()) == [] + assert list(token_info.iter_melt_utxos()) == [] + assert not token_info.can_mint() + assert not token_info.can_melt() + + def test_acquire_action_then_revoke(self) -> None: + artifacts = self.dag_builder.build_from_str(f''' + blockchain genesis b[1..14] + b10 < dummy + + nc1a.nc_id = "{self.blueprint_id.hex()}" + nc1a.nc_method = initialize() + + nc1b.nc_id = nc1a + 
nc1b.nc_method = create_token() + nc1b.nc_deposit = 1000 HTR + + nc2a.nc_id = "{self.blueprint_id.hex()}" + nc2a.nc_method = initialize() + + nc2b.nc_id = nc2a + nc2b.nc_method = acquire_authority(`nc1a`) + + nc1c.nc_id = nc1a + nc1c.nc_method = revoke_all(null) + + nc2c.nc_id = nc2a + nc2c.nc_method = revoke_all(null) + + nc1a <-- nc1b <-- nc2a <-- nc2b <-- nc1c <-- nc2c + nc1b <-- b11 + nc2b <-- b12 + nc1c <-- b13 + nc2c <-- b14 + ''') + artifacts.propagate_with(self.manager, up_to='dummy') + nc1a, nc1b, nc1c, nc2a, nc2b, nc2c = artifacts.get_typed_vertices( + ['nc1a', 'nc1b', 'nc1c', 'nc2a', 'nc2b', 'nc2c'], + Transaction + ) + tka = derive_child_token_id(ContractId(VertexId(nc1a.hash)), 'TKA') + + # Before executing nc1b, the token doesn't exist + artifacts.propagate_with(self.manager, up_to='nc1b') + with pytest.raises(KeyError): + self.tokens_index.get_token_info(tka) + + # After b11, nc1b is executed and holds authorities + artifacts.propagate_with(self.manager, up_to='b11') + assert nc1b.get_metadata().nc_execution is NCExecutionState.SUCCESS + assert nc2b.get_metadata().nc_execution is None + + nc1_storage = self.manager.get_best_block_nc_storage(nc1a.hash) + assert nc1_storage.get_balance(tka) == Balance(value=1000, can_mint=True, can_melt=True) + + token_info = self.tokens_index.get_token_info(tka) + assert list(token_info.iter_mint_utxos()) == [] + assert list(token_info.iter_melt_utxos()) == [] + assert token_info.can_mint() + assert token_info.can_melt() + + # After b12, nc2b is executed and also holds authorities + artifacts.propagate_with(self.manager, up_to='b12') + assert nc2b.get_metadata().nc_execution is NCExecutionState.SUCCESS + + nc1_storage = self.manager.get_best_block_nc_storage(nc1a.hash) + nc2_storage = self.manager.get_best_block_nc_storage(nc2a.hash) + assert nc1_storage.get_balance(tka) == Balance(value=1000, can_mint=True, can_melt=True) + assert nc2_storage.get_balance(tka) == Balance(value=0, can_mint=True, can_melt=True) + + 
token_info = self.tokens_index.get_token_info(tka) + assert list(token_info.iter_mint_utxos()) == [] + assert list(token_info.iter_melt_utxos()) == [] + assert token_info.can_mint() + assert token_info.can_melt() + + # After b13, authorities are revoked from nc1a + artifacts.propagate_with(self.manager, up_to='b13') + assert nc1c.get_metadata().nc_execution is NCExecutionState.SUCCESS + + nc1_storage = self.manager.get_best_block_nc_storage(nc1a.hash) + nc2_storage = self.manager.get_best_block_nc_storage(nc2a.hash) + assert nc1_storage.get_balance(tka) == Balance(value=1000, can_mint=False, can_melt=False) + assert nc2_storage.get_balance(tka) == Balance(value=0, can_mint=True, can_melt=True) + + token_info = self.tokens_index.get_token_info(tka) + assert list(token_info.iter_mint_utxos()) == [] + assert list(token_info.iter_melt_utxos()) == [] + assert token_info.can_mint() + assert token_info.can_melt() + + # Finally, after b14, authorities are revoked from nc2a and the token index reflects that nobody can mint/melt + artifacts.propagate_with(self.manager, up_to='b14') + assert nc2c.get_metadata().nc_execution is NCExecutionState.SUCCESS + + nc1_storage = self.manager.get_best_block_nc_storage(nc1a.hash) + nc2_storage = self.manager.get_best_block_nc_storage(nc2a.hash) + assert nc1_storage.get_balance(tka) == Balance(value=1000, can_mint=False, can_melt=False) + assert nc2_storage.get_balance(tka) == Balance(value=0, can_mint=False, can_melt=False) + + token_info = self.tokens_index.get_token_info(tka) + assert list(token_info.iter_mint_utxos()) == [] + assert list(token_info.iter_melt_utxos()) == [] + assert not token_info.can_mint() + assert not token_info.can_melt() diff --git a/tests/nanocontracts/test_blueprint.py b/tests/nanocontracts/test_blueprint.py new file mode 100644 index 000000000..212aa09d7 --- /dev/null +++ b/tests/nanocontracts/test_blueprint.py @@ -0,0 +1,326 @@ +from hathor.nanocontracts.blueprint import Blueprint +from 
hathor.nanocontracts.context import Context +from hathor.nanocontracts.exception import BlueprintSyntaxError, NCFail, NCInsufficientFunds, NCViewMethodError +from hathor.nanocontracts.nc_types import make_nc_type_for_arg_type as make_nc_type +from hathor.nanocontracts.storage import NCBlockStorage, NCMemoryStorageFactory +from hathor.nanocontracts.storage.backends import MemoryNodeTrieStore +from hathor.nanocontracts.storage.contract_storage import Balance, BalanceKey +from hathor.nanocontracts.storage.patricia_trie import PatriciaTrie +from hathor.nanocontracts.types import ( + Address, + BlueprintId, + ContractId, + NCDepositAction, + NCWithdrawalAction, + TokenUid, + VertexId, + public, + view, +) +from tests import unittest +from tests.nanocontracts.utils import TestRunner + +STR_NC_TYPE = make_nc_type(str) +BYTES_NC_TYPE = make_nc_type(bytes) +INT_NC_TYPE = make_nc_type(int) +BOOL_NC_TYPE = make_nc_type(bool) + +MOCK_ADDRESS = Address(b'') + + +class SimpleFields(Blueprint): + a: str + b: bytes + c: int + d: bool + + @public + def initialize(self, ctx: Context, a: str, b: bytes, c: int, d: bool) -> None: + self.a = a + self.b = b + self.c = c + self.d = d + + # Read the content of the variable. 
+ if self.a: + pass + + +class ContainerFields(Blueprint): + a: dict[str, str] + b: dict[str, bytes] + c: dict[str, int] + + def _set(self, _dict, key, value): + _dict[key] = value + assert key in _dict + assert _dict[key] == value + del _dict[key] + assert key not in _dict + _dict[key] = value + + @public + def initialize(self, ctx: Context, items: list[tuple[str, str, bytes, int]]) -> None: + for key, va, vb, vc in items: + self._set(self.a, key, va) + self._set(self.b, key, vb) + self._set(self.c, key, vc) + + +class MyBlueprint(Blueprint): + a: int + + @public + def initialize(self, ctx: Context) -> None: + self.a = 1 + + @public(allow_deposit=True, allow_withdrawal=True) + def nop(self, ctx: Context) -> None: + pass + + @public + def fail(self, ctx: Context) -> None: + self.a = 2 + raise NCFail() + self.a = 3 + + @view + def my_private_method_fail(self) -> None: + # This operation is not permitted because private methods + # cannot change the transaction state. + self.a = 2 + + @view + def my_private_method_nop(self) -> int: + return 1 + + +class NCBlueprintTestCase(unittest.TestCase): + def setUp(self) -> None: + super().setUp() + self.simple_fields_id = ContractId(VertexId(b'1' * 32)) + self.container_fields_id = ContractId(VertexId(b'2' * 32)) + self.my_blueprint_id = ContractId(VertexId(b'3' * 32)) + + nc_storage_factory = NCMemoryStorageFactory() + store = MemoryNodeTrieStore() + block_trie = PatriciaTrie(store) + block_storage = NCBlockStorage(block_trie) + self.manager = self.create_peer('unittests') + self.runner = TestRunner( + self.manager.tx_storage, nc_storage_factory, block_storage, settings=self._settings, reactor=self.reactor + ) + + self.blueprint_ids: dict[str, BlueprintId] = { + 'simple_fields': BlueprintId(VertexId(b'a' * 32)), + 'container_fields': BlueprintId(VertexId(b'b' * 32)), + 'my_blueprint': BlueprintId(VertexId(b'c' * 32)), + } + + nc_catalog = self.manager.tx_storage.nc_catalog + 
nc_catalog.blueprints[self.blueprint_ids['simple_fields']] = SimpleFields + nc_catalog.blueprints[self.blueprint_ids['container_fields']] = ContainerFields + nc_catalog.blueprints[self.blueprint_ids['my_blueprint']] = MyBlueprint + + genesis = self.manager.tx_storage.get_all_genesis() + self.tx = [t for t in genesis if t.is_transaction][0] + + def test_simple_fields(self) -> None: + blueprint_id = self.blueprint_ids['simple_fields'] + nc_id = self.simple_fields_id + + ctx = Context([], self.tx, MOCK_ADDRESS, timestamp=0) + a = 'str' + b = b'bytes' + c = 123 + d = True + self.runner.create_contract(nc_id, blueprint_id, ctx, a, b, c, d) + + storage = self.runner.get_storage(nc_id) + self.assertEqual(storage.get_obj(b'a', STR_NC_TYPE), a) + self.assertEqual(storage.get_obj(b'b', BYTES_NC_TYPE), b) + self.assertEqual(storage.get_obj(b'c', INT_NC_TYPE), c) + self.assertEqual(storage.get_obj(b'd', BOOL_NC_TYPE), d) + + def test_container_fields(self) -> None: + blueprint_id = self.blueprint_ids['container_fields'] + nc_id = self.container_fields_id + + ctx = Context([], self.tx, MOCK_ADDRESS, timestamp=0) + items = [ + ('a', '1', b'1', 1), + ('b', '2', b'2', 2), + ('c', '3', b'3', 3), + ] + self.runner.create_contract(nc_id, blueprint_id, ctx, items) + + storage = self.runner.get_storage(nc_id) + self.assertEqual(storage.get_obj(b'a:\x01a', STR_NC_TYPE), '1') + self.assertEqual(storage.get_obj(b'a:\x01b', STR_NC_TYPE), '2') + self.assertEqual(storage.get_obj(b'a:\x01c', STR_NC_TYPE), '3') + + def _create_my_blueprint_contract(self) -> None: + blueprint_id = self.blueprint_ids['my_blueprint'] + nc_id = self.my_blueprint_id + ctx = Context([], self.tx, MOCK_ADDRESS, timestamp=0) + self.runner.create_contract(nc_id, blueprint_id, ctx) + + def test_public_method_fails(self) -> None: + self._create_my_blueprint_contract() + nc_id = self.my_blueprint_id + storage = self.runner.get_storage(nc_id) + + with self.assertRaises(NCFail): + ctx = Context([], self.tx, MOCK_ADDRESS, 
timestamp=0) + self.runner.call_public_method(nc_id, 'fail', ctx) + self.assertEqual(1, storage.get_obj(b'a', INT_NC_TYPE)) + + def test_private_method_change_state(self) -> None: + self._create_my_blueprint_contract() + nc_id = self.my_blueprint_id + with self.assertRaises(NCViewMethodError): + self.runner.call_view_method(nc_id, 'my_private_method_fail') + + def test_private_method_success(self) -> None: + self._create_my_blueprint_contract() + nc_id = self.my_blueprint_id + self.assertEqual(1, self.runner.call_view_method(nc_id, 'my_private_method_nop')) + + def test_initial_balance(self) -> None: + self._create_my_blueprint_contract() + nc_id = self.my_blueprint_id + storage = self.runner.get_storage(nc_id) + self.assertEqual(Balance(value=0, can_mint=False, can_melt=False), storage.get_balance(MOCK_ADDRESS)) + + def test_nop(self) -> None: + self._create_my_blueprint_contract() + nc_id = self.my_blueprint_id + ctx = Context([], self.tx, MOCK_ADDRESS, timestamp=0) + self.runner.call_public_method(nc_id, 'nop', ctx) + + def test_withdrawal_fail(self) -> None: + self._create_my_blueprint_contract() + nc_id = self.my_blueprint_id + token_uid = TokenUid(b'\0') + ctx = Context( + [NCWithdrawalAction(token_uid=token_uid, amount=1)], + self.tx, + MOCK_ADDRESS, + timestamp=0, + ) + with self.assertRaises(NCInsufficientFunds): + self.runner.call_public_method(nc_id, 'nop', ctx) + + def test_deposits_and_withdrawals(self) -> None: + self._create_my_blueprint_contract() + nc_id = self.my_blueprint_id + storage = self.runner.get_storage(nc_id) + token_uid = TokenUid(b'\0') + ctx = Context( + [NCDepositAction(token_uid=token_uid, amount=100)], + self.tx, + MOCK_ADDRESS, + timestamp=0, + ) + self.runner.call_public_method(nc_id, 'nop', ctx) + self.assertEqual(Balance(value=100, can_mint=False, can_melt=False), storage.get_balance(token_uid)) + + ctx = Context( + [NCWithdrawalAction(token_uid=token_uid, amount=1)], + self.tx, + MOCK_ADDRESS, + timestamp=0, + ) + 
self.runner.call_public_method(nc_id, 'nop', ctx) + self.assertEqual(Balance(value=99, can_mint=False, can_melt=False), storage.get_balance(token_uid)) + + ctx = Context( + [NCWithdrawalAction(token_uid=token_uid, amount=50)], + self.tx, + MOCK_ADDRESS, + timestamp=0, + ) + self.runner.call_public_method(nc_id, 'nop', ctx) + self.assertEqual(Balance(value=49, can_mint=False, can_melt=False), storage.get_balance(token_uid)) + + ctx = Context( + [NCWithdrawalAction(token_uid=token_uid, amount=50)], + self.tx, + MOCK_ADDRESS, + timestamp=0, + ) + with self.assertRaises(NCInsufficientFunds): + self.runner.call_public_method(nc_id, 'nop', ctx) + + def test_withdraw_wrong_token(self) -> None: + self._create_my_blueprint_contract() + nc_id = self.my_blueprint_id + storage = self.runner.get_storage(nc_id) + + token_uid = TokenUid(b'\0') + wrong_token_uid = TokenUid(b'\1') + + ctx = Context( + [NCDepositAction(token_uid=token_uid, amount=100)], + self.tx, + MOCK_ADDRESS, + timestamp=0, + ) + self.runner.call_public_method(nc_id, 'nop', ctx) + self.assertEqual(Balance(value=100, can_mint=False, can_melt=False), storage.get_balance(token_uid)) + + ctx = Context( + [NCWithdrawalAction(token_uid=wrong_token_uid, amount=1)], + self.tx, + MOCK_ADDRESS, + timestamp=0, + ) + with self.assertRaises(NCInsufficientFunds): + self.runner.call_public_method(nc_id, 'nop', ctx) + self.assertEqual(Balance(value=100, can_mint=False, can_melt=False), storage.get_balance(token_uid)) + + def test_invalid_field(self) -> None: + with self.assertRaises(BlueprintSyntaxError): + class WrongBlueprint(Blueprint): + a: float + + @public + def initialize(self, ctx: Context) -> None: + self.a = 1.2 + + def test_balances(self) -> None: + self._create_my_blueprint_contract() + nc_id = self.my_blueprint_id + storage = self.runner.get_storage(nc_id) + + token_uid = TokenUid(b'\0') # HTR + ctx = Context( + [NCDepositAction(token_uid=token_uid, amount=100)], + self.tx, + MOCK_ADDRESS, + timestamp=0, + ) + 
self.runner.call_public_method(nc_id, 'nop', ctx) + self.assertEqual(Balance(value=100, can_mint=False, can_melt=False), storage.get_balance(token_uid)) + + token_uid2 = TokenUid(b'\0' + b'\1' * 31) + ctx = Context( + [NCDepositAction(token_uid=token_uid2, amount=200)], + self.tx, + MOCK_ADDRESS, + timestamp=0, + ) + self.runner.call_public_method(nc_id, 'nop', ctx) + self.assertEqual(Balance(value=200, can_mint=False, can_melt=False), storage.get_balance(token_uid2)) + + all_balances = storage.get_all_balances() + key1 = BalanceKey(nc_id, token_uid) + key2 = BalanceKey(nc_id, token_uid2) + + self.assertEqual( + all_balances, + { + key1: Balance(value=100, can_mint=False, can_melt=False), + key2: Balance(value=200, can_mint=False, can_melt=False), + } + ) diff --git a/tests/nanocontracts/test_blueprint_syntax.py b/tests/nanocontracts/test_blueprint_syntax.py new file mode 100644 index 000000000..ba651d633 --- /dev/null +++ b/tests/nanocontracts/test_blueprint_syntax.py @@ -0,0 +1,566 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import re + +import pytest + +from hathor.nanocontracts import Blueprint, Context, public, view +from hathor.nanocontracts.exception import BlueprintSyntaxError +from hathor.nanocontracts.types import Address, NCArgs, fallback +from tests.nanocontracts.blueprints.unittest import BlueprintTestCase + + +class TestBlueprintSyntax(BlueprintTestCase): + def setUp(self) -> None: + super().setUp() + + self.blueprint_id = self.gen_random_blueprint_id() + self.contract_id = self.gen_random_contract_id() + self.ctx = Context( + actions=[], + vertex=self.get_genesis_tx(), + address=Address(self.gen_random_address()), + timestamp=self.now, + ) + + def test_success(self) -> None: + class MyBlueprint(Blueprint): + a: str + + @public + def initialize(self, ctx: Context, a: int) -> int: + return a + + @view + def some_view(self, a: int) -> int: + return a + + @fallback + def fallback(self, ctx: Context, method_name: str, nc_args: NCArgs) -> int: + return 123 + + self.nc_catalog.blueprints[self.blueprint_id] = MyBlueprint + self.runner.create_contract(self.contract_id, self.blueprint_id, self.ctx, 123) + + def test_forbidden_field_name(self) -> None: + with pytest.raises(BlueprintSyntaxError, match='field name is forbidden: `log`'): + class MyBlueprint(Blueprint): + log: str # type: ignore + + @public + def initialize(self, ctx: Context) -> None: + pass + + def test_field_name_with_underscore(self) -> None: + with pytest.raises(BlueprintSyntaxError, match='field name cannot start with underscore: `_a`'): + class MyBlueprint(Blueprint): + _a: str + + @public + def initialize(self, ctx: Context) -> None: + pass + + def test_field_with_default(self) -> None: + with pytest.raises(BlueprintSyntaxError, match='fields with default values are currently not supported: `a`'): + class MyBlueprint(Blueprint): + a: str = 'a' + + @public + def initialize(self, ctx: Context) -> None: + pass + + def test_no_initialize(self) -> None: + with pytest.raises(BlueprintSyntaxError, match='blueprints 
require a method called `initialize`'): + class MyBlueprint(Blueprint): + pass + + def test_initialize_non_public(self) -> None: + with pytest.raises(BlueprintSyntaxError, match='`initialize` method must be annotated with @public'): + class MyBlueprint(Blueprint): + def initialize(self, ctx: Context) -> None: + pass + + def test_initialize_view(self) -> None: + with pytest.raises(BlueprintSyntaxError, match='`initialize` method cannot be annotated with @view'): + class MyBlueprint(Blueprint): + @view + def initialize(self, ctx: Context) -> None: + pass + + def test_initialize_fallback(self) -> None: + msg = '@fallback method must be called `fallback`: `initialize()`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @fallback + def initialize(self, ctx: Context) -> None: + pass + + def test_public_missing_self(self) -> None: + msg = '@public method must have `self` argument: `initialize()`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize() -> None: # type: ignore + pass + + def test_public_wrong_self(self) -> None: + msg = '@public method first argument must be called `self`: `initialize()`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(wrong) -> None: + pass + + def test_public_typed_self(self) -> None: + msg = '@public method `self` argument must not be typed: `initialize()`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self: int) -> None: # type: ignore + pass + + def test_view_missing_self(self) -> None: + msg = '@view method must have `self` argument: `nop()`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @view + def nop() -> None: # type: ignore + pass + + def 
test_view_wrong_self(self) -> None: + msg = '@view method first argument must be called `self`: `nop()`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @view + def nop(wrong) -> None: + pass + + def test_view_typed_self(self) -> None: + msg = '@view method `self` argument must not be typed: `nop()`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @view + def nop(self: int) -> None: # type: ignore + pass + + def test_fallback_missing_self(self) -> None: + msg = '@fallback method must have `self` argument: `fallback()`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @fallback + def fallback() -> None: # type: ignore + pass + + def test_fallback_wrong_self(self) -> None: + msg = '@fallback method first argument must be called `self`: `fallback()`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @fallback + def fallback(wrong) -> None: + pass + + def test_fallback_typed_self(self) -> None: + msg = '@fallback method `self` argument must not be typed: `fallback()`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @fallback + def fallback(self: int) -> None: # type: ignore + pass + + def test_public_missing_context(self) -> None: + msg = '@public method must have `Context` argument: `initialize()`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self) -> None: + pass + + def 
test_public_context_different_name_success(self) -> None: + class MyBlueprint(Blueprint): + @public + def initialize(self, context: Context) -> None: + pass + + self.nc_catalog.blueprints[self.blueprint_id] = MyBlueprint + self.runner.create_contract(self.contract_id, self.blueprint_id, self.ctx) + + def test_public_context_untyped(self) -> None: + msg = 'argument `ctx` on method `initialize` must be typed' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx) -> None: # type: ignore + pass + + def test_public_context_wrong_type(self) -> None: + msg = '@public method second arg `ctx` argument must be of type `Context`: `initialize()`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: int) -> None: + pass + + def test_fallback_missing_context(self) -> None: + msg = '@fallback method must have `Context` argument: `fallback()`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @fallback + def fallback(self) -> None: + pass + + def test_fallback_context_untyped(self) -> None: + msg = 'argument `ctx` on method `fallback` must be typed' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @fallback + def fallback(self, ctx) -> None: # type: ignore + pass + + def test_fallback_context_wrong_type(self) -> None: + msg = '@fallback method second arg `ctx` argument must be of type `Context`: `fallback()`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @fallback + def fallback(self, ctx: int) -> None: + pass + + def test_view_with_ctx(self) -> None: + msg = '@view method 
cannot have arg with type `Context`: `nop()`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @view + def nop(self, ctx: Context) -> None: + pass + + def test_view_with_context_type(self) -> None: + msg = '@view method cannot have arg with type `Context`: `nop()`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @view + def nop(self, a: int, b: Context) -> None: + pass + + def test_cannot_have_multiple_method_types1(self) -> None: + msg = 'method must be annotated with at most one method type: `nop()`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @public + @view + def nop(self) -> None: + pass + + def test_cannot_have_multiple_method_types2(self) -> None: + msg = 'method must be annotated with at most one method type: `nop()`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @fallback + @view + def nop(self) -> None: + pass + + def test_invalid_field_type(self) -> None: + msg = 'unsupported field type: `a: float`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + a: float + + @public + def initialize(self, ctx: Context) -> None: + pass + + def test_public_missing_arg_type(self) -> None: + msg = 'argument `a` on method `initialize` must be typed' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context, a) -> None: # type: ignore + pass + + # TODO + @pytest.mark.skip(reason='code commented on nanocontracts/types.py') + def test_public_invalid_arg_type(self) -> None: + msg 
= 'unsupported type `float` on argument `a` of method `initialize`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context, a: float) -> None: + pass + + def test_public_missing_return_type(self) -> None: + msg = 'missing return type on method `initialize`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context): # type: ignore + pass + + # TODO + @pytest.mark.skip(reason='code commented on nanocontracts/types.py') + def test_public_invalid_return_type(self) -> None: + msg = 'unsupported return type `float` on method `initialize`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> float: + return 0 + + def test_view_missing_arg_type(self) -> None: + msg = 'argument `a` on method `nop` must be typed' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @view + def nop(self, a) -> None: # type: ignore + pass + + # TODO + @pytest.mark.skip(reason='code commented on nanocontracts/types.py') + def test_view_invalid_arg_type(self) -> None: + msg = 'unsupported type `float` on argument `a` of method `nop`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @view + def nop(self, a: float) -> None: + pass + + def test_view_missing_return_type(self) -> None: + msg = 'missing return type on method `nop`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @view + def nop(self): + pass + + # TODO + @pytest.mark.skip(reason='code commented on nanocontracts/types.py') + def 
test_view_invalid_return_type(self) -> None: + msg = 'unsupported return type `float` on method `nop`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @view + def nop(self) -> float: + return 0 + + def test_fallback_missing_args1(self) -> None: + msg = '@fallback method must have these args: `ctx: Context, method_name: str, nc_args: NCArgs`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @fallback + def fallback(self, ctx: Context) -> None: + pass + + def test_fallback_missing_args2(self) -> None: + msg = '@fallback method must have these args: `ctx: Context, method_name: str, nc_args: NCArgs`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @fallback + def fallback(self, ctx: Context, method_name: str) -> None: + pass + + def test_fallback_missing_arg_type1(self) -> None: + msg = 'argument `method_name` on method `fallback` must be typed' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @fallback + def fallback(self, ctx: Context, method_name, args_bytes: bytes) -> None: # type: ignore + pass + + def test_fallback_missing_arg_type2(self) -> None: + msg = 'argument `args_bytes` on method `fallback` must be typed' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @fallback + def fallback(self, ctx: Context, method_name: str, args_bytes) -> None: # type: ignore + pass + + def test_fallback_wrong_arg_type1(self) -> None: + msg = '@fallback method must have these args: `ctx: Context, method_name: 
str, nc_args: NCArgs`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @fallback + def fallback(self, ctx: Context, method_name: int, args_bytes: bytes) -> None: + pass + + def test_fallback_wrong_arg_type2(self) -> None: + msg = '@fallback method must have these args: `ctx: Context, method_name: str, nc_args: NCArgs`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @fallback + def fallback(self, ctx: Context, method_name: str, args_bytes: int) -> None: + pass + + def test_fallback_missing_return_type(self) -> None: + msg = 'missing return type on method `fallback`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @fallback + def fallback(self, ctx: Context, method_name: str, nc_args: NCArgs): # type: ignore + pass + + # TODO + @pytest.mark.skip(reason='code commented on nanocontracts/types.py') + def test_fallback_invalid_return_type(self) -> None: + msg = 'unsupported return type `float` on method `nop`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @fallback + def fallback(self, ctx: Context, method_name: str, nc_args: NCArgs) -> float: + return 0 + + def test_fallback_wrong_name(self) -> None: + msg = '@fallback method must be called `fallback`: `wrong()`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @fallback + def wrong(self) -> None: + pass + + def test_fallback_not_annotated(self) -> None: + msg = '`fallback` method must be annotated with @fallback' + with 
pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + def fallback(self) -> None: + pass + + def test_fallback_view(self) -> None: + msg = '`fallback` method cannot be annotated with @view' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @view + def fallback(self) -> None: + pass + + def test_fallback_public(self) -> None: + msg = '`fallback` method cannot be annotated with @public' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @public + def fallback(self) -> None: + pass diff --git a/tests/nanocontracts/test_blueprints/__init__.py b/tests/nanocontracts/test_blueprints/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/nanocontracts/test_blueprints/all_fields.py b/tests/nanocontracts/test_blueprints/all_fields.py new file mode 100644 index 000000000..f88ca7f65 --- /dev/null +++ b/tests/nanocontracts/test_blueprints/all_fields.py @@ -0,0 +1,69 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from collections import OrderedDict +from typing import NamedTuple, Optional, Union + +from hathor.nanocontracts.blueprint import Blueprint +from hathor.nanocontracts.context import Context +from hathor.nanocontracts.types import ( + Address, + Amount, + BlueprintId, + ContractId, + SignedData, + Timestamp, + TokenUid, + TxOutputScript, + VertexId, + public, +) + + +class MyTuple(NamedTuple): + a: int + b: str + + +class AllFieldsBlueprint(Blueprint): + attribute1: OrderedDict[str, int] + attribute2: list[int] + attribute3: set[int] + attribute4: bool + attribute5: bytes + attribute6: dict[str, int] + attribute7: frozenset[int] + attribute8: int + attribute9: str + attribute10: dict[str, tuple[int]] + attribute11: tuple[str, int] + attribute12: tuple[str, ...] + attribute13: Union[str, None] + attribute14: Optional[str] + attribute15: str | None + attribute16: None | str + attribute17: Address + attribute18: Amount + attribute19: BlueprintId + attribute20: ContractId + attribute21: Timestamp + attribute22: TokenUid + attribute23: TxOutputScript + attribute24: VertexId + attribute25: SignedData[str] + attribute26: MyTuple + + @public + def initialize(self, ctx: Context) -> None: + pass diff --git a/tests/nanocontracts/test_blueprints/bet.py b/tests/nanocontracts/test_blueprints/bet.py new file mode 100644 index 000000000..fe81332b8 --- /dev/null +++ b/tests/nanocontracts/test_blueprints/bet.py @@ -0,0 +1,224 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from math import floor +from typing import Optional, TypeAlias + +from hathor.nanocontracts.blueprint import Blueprint +from hathor.nanocontracts.context import Context +from hathor.nanocontracts.exception import NCFail +from hathor.nanocontracts.types import ( + Address, + NCAction, + NCDepositAction, + NCWithdrawalAction, + SignedData, + Timestamp, + TokenUid, + TxOutputScript, + public, + view, +) + +Result: TypeAlias = str +Amount: TypeAlias = int + + +class InvalidToken(NCFail): + pass + + +class ResultAlreadySet(NCFail): + pass + + +class ResultNotAvailable(NCFail): + pass + + +class TooManyActions(NCFail): + pass + + +class TooLate(NCFail): + pass + + +class InsufficientBalance(NCFail): + pass + + +class InvalidOracleSignature(NCFail): + pass + + +class Bet(Blueprint): + """Bet blueprint with final result provided by an oracle. + + The life cycle of contracts using this blueprint is the following: + + 1. [Owner ] Create a contract. + 2. [User 1] `bet(...)` on result A. + 3. [User 2] `bet(...)` on result A. + 4. [User 3] `bet(...)` on result B. + 5. [Oracle] `set_result(...)` as result A. + 6. [User 1] `withdraw(...)` + 7. [User 2] `withdraw(...)` + + Notice that, in the example above, users 1 and 2 won. + """ + + # Total bets per result. + bets_total: dict[Result, Amount] + + # Total bets per (result, address). + bets_address: dict[tuple[Result, Address], Amount] + + # Bets grouped by address. + address_details: dict[Address, dict[Result, Amount]] + + # Amount that has already been withdrawn per address. + withdrawals: dict[Address, Amount] + + # Total bets. + total: Amount + + # Final result. + final_result: Optional[Result] + + # Oracle script to set the final result. + oracle_script: TxOutputScript + + # Maximum timestamp to make a bet. + date_last_bet: Timestamp + + # Token for this bet. 
+ token_uid: TokenUid + + @public + def initialize(self, ctx: Context, oracle_script: TxOutputScript, token_uid: TokenUid, + date_last_bet: Timestamp) -> None: + if len(ctx.actions) != 0: + raise NCFail('must be a single call') + self.oracle_script = oracle_script + self.token_uid = token_uid + self.date_last_bet = date_last_bet + self.final_result = None + self.total = Amount(0) + + @view + def has_result(self) -> bool: + """Return True if the final result has already been set.""" + return bool(self.final_result is not None) + + def fail_if_result_is_available(self) -> None: + """Fail the execution if the final result has already been set.""" + if self.has_result(): + raise ResultAlreadySet + + def fail_if_result_is_not_available(self) -> None: + """Fail the execution if the final result is not available yet.""" + if not self.has_result(): + raise ResultNotAvailable + + def fail_if_invalid_token(self, action: NCAction) -> None: + """Fail the execution if the token is invalid.""" + if action.token_uid != self.token_uid: + token1 = self.token_uid.hex() if self.token_uid else None + token2 = action.token_uid.hex() if action.token_uid else None + raise InvalidToken(f'invalid token ({token1} != {token2})') + + def _get_action(self, ctx: Context) -> NCAction: + """Return the only action available; fails otherwise.""" + if len(ctx.actions) != 1: + raise TooManyActions('only one token supported') + if self.token_uid not in ctx.actions: + raise InvalidToken(f'token different from {self.token_uid.hex()}') + return ctx.get_single_action(self.token_uid) + + @public(allow_deposit=True) + def bet(self, ctx: Context, address: Address, score: str) -> None: + """Make a bet.""" + action = self._get_action(ctx) + assert isinstance(action, NCDepositAction) + self.fail_if_result_is_available() + self.fail_if_invalid_token(action) + if ctx.timestamp > self.date_last_bet: + raise TooLate(f'cannot place bets after {self.date_last_bet}') + amount = Amount(action.amount) + self.total = 
Amount(self.total + amount) + if score not in self.bets_total: + self.bets_total[score] = amount + else: + self.bets_total[score] += amount + key = (score, address) + if key not in self.bets_address: + self.bets_address[key] = amount + else: + self.bets_address[key] += amount + + # Update dict indexed by address + partial = self.address_details.get(address, {}) + partial.update({ + score: self.bets_address[key] + }) + + self.address_details[address] = partial + + @public + def set_result(self, ctx: Context, result: SignedData[Result]) -> None: + """Set final result. This method is called by the oracle.""" + self.fail_if_result_is_available() + if not result.checksig(self.syscall.get_contract_id(), self.oracle_script): + raise InvalidOracleSignature + self.final_result = result.data + + @public(allow_withdrawal=True) + def withdraw(self, ctx: Context) -> None: + """Withdraw tokens after the final result is set.""" + action = self._get_action(ctx) + assert isinstance(action, NCWithdrawalAction) + self.fail_if_result_is_not_available() + self.fail_if_invalid_token(action) + address = Address(ctx.address) + allowed = self.get_max_withdrawal(address) + if action.amount > allowed: + raise InsufficientBalance(f'withdrawal amount is greater than available (max: {allowed})') + if address not in self.withdrawals: + self.withdrawals[address] = action.amount + else: + self.withdrawals[address] += action.amount + + @view + def get_max_withdrawal(self, address: Address) -> Amount: + """Return the maximum amount available for withdrawal.""" + total = self.get_winner_amount(address) + withdrawals = self.withdrawals.get(address, Amount(0)) + return total - withdrawals + + @view + def get_winner_amount(self, address: Address) -> Amount: + """Return how much an address has won.""" + self.fail_if_result_is_not_available() + if self.final_result not in self.bets_total: + return Amount(0) + result_total = self.bets_total[self.final_result] + if result_total == 0: + return Amount(0) + 
address_total = self.bets_address.get((self.final_result, address), 0) + percentage = address_total / result_total + return Amount(floor(percentage * self.total)) + + +__blueprint__ = Bet diff --git a/tests/nanocontracts/test_blueprints/swap_demo.py b/tests/nanocontracts/test_blueprints/swap_demo.py new file mode 100644 index 000000000..6deb55228 --- /dev/null +++ b/tests/nanocontracts/test_blueprints/swap_demo.py @@ -0,0 +1,97 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from hathor.nanocontracts.blueprint import Blueprint +from hathor.nanocontracts.context import Context +from hathor.nanocontracts.exception import NCFail +from hathor.nanocontracts.types import NCDepositAction, NCWithdrawalAction, TokenUid, public, view + + +class SwapDemo(Blueprint): + """Blueprint to execute swaps between tokens. + This blueprint is here just as a reference for blueprint developers, not for real use. + """ + + # TokenA identifier and quantity multiplier. + token_a: TokenUid + multiplier_a: int + + # TokenB identifier and quantity multiplier. + token_b: TokenUid + multiplier_b: int + + # Count number of swaps executed. 
+ swaps_counter: int + + @public(allow_deposit=True) + def initialize( + self, + ctx: Context, + token_a: TokenUid, + token_b: TokenUid, + multiplier_a: int, + multiplier_b: int + ) -> None: + """Initialize the contract.""" + + if token_a == token_b: + raise NCFail + + if set(ctx.actions.keys()) != {token_a, token_b}: + raise InvalidTokens + + self.token_a = token_a + self.token_b = token_b + self.multiplier_a = multiplier_a + self.multiplier_b = multiplier_b + self.swaps_counter = 0 + + @public(allow_deposit=True, allow_withdrawal=True) + def swap(self, ctx: Context) -> None: + """Execute a token swap.""" + + if set(ctx.actions.keys()) != {self.token_a, self.token_b}: + raise InvalidTokens + + action_a = ctx.get_single_action(self.token_a) + action_b = ctx.get_single_action(self.token_b) + + if not ( + (isinstance(action_a, NCDepositAction) and isinstance(action_b, NCWithdrawalAction)) + or (isinstance(action_a, NCWithdrawalAction) and isinstance(action_b, NCDepositAction)) + ): + raise InvalidActions + + if not self.is_ratio_valid(action_a.amount, action_b.amount): + raise InvalidRatio + + # All good! Let's accept the transaction. + self.swaps_counter += 1 + + @view + def is_ratio_valid(self, qty_a: int, qty_b: int) -> bool: + """Check if the swap quantities are valid.""" + return (self.multiplier_a * qty_a == self.multiplier_b * qty_b) + + +class InvalidTokens(NCFail): + pass + + +class InvalidActions(NCFail): + pass + + +class InvalidRatio(NCFail): + pass diff --git a/tests/nanocontracts/test_blueprints/test_blueprint1.py b/tests/nanocontracts/test_blueprints/test_blueprint1.py new file mode 100644 index 000000000..14e00d3a8 --- /dev/null +++ b/tests/nanocontracts/test_blueprints/test_blueprint1.py @@ -0,0 +1,27 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from hathor.nanocontracts import Blueprint +from hathor.nanocontracts.context import Context +from hathor.nanocontracts.types import public + + +class TestBlueprint1(Blueprint): + @public + def initialize(self, ctx: Context, a: int) -> None: + pass + + @public + def nop(self, ctx: Context) -> None: + pass diff --git a/tests/nanocontracts/test_call_other_contract.py b/tests/nanocontracts/test_call_other_contract.py new file mode 100644 index 000000000..c01cb1b15 --- /dev/null +++ b/tests/nanocontracts/test_call_other_contract.py @@ -0,0 +1,475 @@ +import sys + +import pytest + +from hathor.nanocontracts import Blueprint, Context, NCFail, public, view +from hathor.nanocontracts.exception import ( + NCInsufficientFunds, + NCInvalidContractId, + NCInvalidInitializeMethodCall, + NCNumberOfCallsExceeded, + NCRecursionError, + NCUninitializedContractError, + NCViewMethodError, +) +from hathor.nanocontracts.nc_types import NCType, make_nc_type_for_arg_type as make_nc_type +from hathor.nanocontracts.storage import NCBlockStorage, NCMemoryStorageFactory +from hathor.nanocontracts.storage.backends import MemoryNodeTrieStore +from hathor.nanocontracts.storage.contract_storage import Balance +from hathor.nanocontracts.storage.patricia_trie import PatriciaTrie +from hathor.nanocontracts.types import ( + Address, + BlueprintId, + ContractId, + NCAction, + NCDepositAction, + NCWithdrawalAction, + TokenUid, + VertexId, +) +from tests import unittest +from tests.nanocontracts.utils import TestRunner + +COUNTER_NC_TYPE = make_nc_type(int) +CONTRACT_NC_TYPE: 
NCType[ContractId | None] = make_nc_type(ContractId | None) # type: ignore[arg-type] +MOCK_ADDRESS = Address(b'') + + +class ZeroedCounterFail(NCFail): + pass + + +class MyBlueprint(Blueprint): + counter: int + contract: ContractId | None + + @public(allow_deposit=True) + def initialize(self, ctx: Context, initial: int) -> None: + self.counter = initial + self.contract = None + + @public + def set_contract(self, ctx: Context, contract: ContractId) -> None: + self.contract = contract + + @public(allow_deposit=True) + def split_balance(self, ctx: Context) -> None: + if self.contract is None: + return + + actions = [] + for action in ctx.__all_actions__: + assert isinstance(action, NCDepositAction) + amount = 1 + action.amount // 2 + actions.append(NCDepositAction(token_uid=action.token_uid, amount=amount)) + self.syscall.call_public_method(self.contract, 'split_balance', actions) + + @public(allow_withdrawal=True) + def get_tokens_from_another_contract(self, ctx: Context) -> None: + if self.contract is None: + return + + actions = [] + for action in ctx.__all_actions__: + assert isinstance(action, NCWithdrawalAction) + balance = self.syscall.get_balance_before_current_call(action.token_uid) + diff = balance - action.amount + if diff < 0: + actions.append(NCWithdrawalAction(token_uid=action.token_uid, amount=-diff)) + + if actions: + self.syscall.call_public_method(self.contract, 'get_tokens_from_another_contract', actions) + + @public + def dec(self, ctx: Context, fail_on_zero: bool) -> None: + if self.counter == 0: + if fail_on_zero: + raise ZeroedCounterFail + else: + return + self.counter -= 1 + if self.contract: + actions: list[NCAction] = [] + self.syscall.call_public_method(self.contract, 'dec', actions, fail_on_zero=fail_on_zero) + + @public + def non_stop_call(self, ctx: Context) -> None: + assert self.contract is not None + while True: + actions: list[NCAction] = [] + self.syscall.call_public_method(self.contract, 'dec', actions, fail_on_zero=False) + + 
@view + def get_total_counter(self) -> int: + mine = self.counter + other = 0 + if self.contract: + other = self.syscall.call_view_method(self.contract, 'get_counter') + return mine + other + + @public + def dec_and_get_counter(self, ctx: Context) -> int: + assert self.contract is not None + self.dec(ctx, fail_on_zero=True) + other = self.syscall.call_view_method(self.contract, 'get_counter') + return self.counter + other + + @view + def get_counter(self) -> int: + return self.counter + + @public + def invalid_call_initialize(self, ctx: Context) -> None: + assert self.contract is not None + self.syscall.call_public_method(self.contract, 'initialize', []) + + @view + def invalid_call_public_from_view(self) -> None: + assert self.contract is not None + self.syscall.call_public_method(self.contract, 'dec', []) + + @view + def invalid_call_view_itself(self) -> int: + return self.syscall.call_view_method(self.syscall.get_contract_id(), 'get_counter') + + +class NCBlueprintTestCase(unittest.TestCase): + def setUp(self) -> None: + super().setUp() + + self.manager = self.create_peer('unittests') + self.genesis = self.manager.tx_storage.get_all_genesis() + self.tx = [t for t in self.genesis if t.is_transaction][0] + + nc_storage_factory = NCMemoryStorageFactory() + store = MemoryNodeTrieStore() + block_trie = PatriciaTrie(store) + block_storage = NCBlockStorage(block_trie=block_trie) + self.runner = TestRunner( + self.manager.tx_storage, nc_storage_factory, block_storage, settings=self._settings, reactor=self.reactor + ) + + self.blueprint_id = BlueprintId(VertexId(b'a' * 32)) + + nc_catalog = self.manager.tx_storage.nc_catalog + nc_catalog.blueprints[self.blueprint_id] = MyBlueprint + + self.nc1_id = ContractId(VertexId(b'1' * 32)) + self.nc2_id = ContractId(VertexId(b'2' * 32)) + self.nc3_id = ContractId(VertexId(b'3' * 32)) + + def test_failing(self) -> None: + ctx = Context([], self.tx, MOCK_ADDRESS, timestamp=0) + self.runner.create_contract(self.nc1_id, 
self.blueprint_id, ctx, 5) + self.runner.create_contract(self.nc2_id, self.blueprint_id, ctx, 1) + self.runner.create_contract(self.nc3_id, self.blueprint_id, ctx, 3) + + self.runner.call_public_method(self.nc2_id, 'set_contract', ctx, self.nc1_id) + self.runner.call_public_method(self.nc3_id, 'set_contract', ctx, self.nc2_id) + + storage1 = self.runner.get_storage(self.nc1_id) + self.assertEqual(storage1.get_obj(b'counter', COUNTER_NC_TYPE), 5) + self.assertEqual(storage1.get_obj(b'contract', CONTRACT_NC_TYPE), None) + + storage2 = self.runner.get_storage(self.nc2_id) + self.assertEqual(storage2.get_obj(b'counter', COUNTER_NC_TYPE), 1) + self.assertEqual(storage2.get_obj(b'contract', CONTRACT_NC_TYPE), self.nc1_id) + + storage3 = self.runner.get_storage(self.nc3_id) + self.assertEqual(storage3.get_obj(b'counter', COUNTER_NC_TYPE), 3) + self.assertEqual(storage3.get_obj(b'contract', CONTRACT_NC_TYPE), self.nc2_id) + + self.runner.call_public_method(self.nc3_id, 'dec', ctx, fail_on_zero=True) + self.assertEqual(storage1.get_obj(b'counter', COUNTER_NC_TYPE), 4) + self.assertEqual(storage2.get_obj(b'counter', COUNTER_NC_TYPE), 0) + self.assertEqual(storage3.get_obj(b'counter', COUNTER_NC_TYPE), 2) + + with self.assertRaises(ZeroedCounterFail): + self.runner.call_public_method(self.nc3_id, 'dec', ctx, fail_on_zero=True) + + self.assertEqual(storage1.get_obj(b'counter', COUNTER_NC_TYPE), 4) + self.assertEqual(storage2.get_obj(b'counter', COUNTER_NC_TYPE), 0) + self.assertEqual(storage3.get_obj(b'counter', COUNTER_NC_TYPE), 2) + + def test_call_itself(self) -> None: + ctx = Context([], self.tx, MOCK_ADDRESS, timestamp=0) + self.runner.create_contract(self.nc1_id, self.blueprint_id, ctx, 10) + self.runner.call_public_method(self.nc1_id, 'set_contract', ctx, self.nc1_id) + + with pytest.raises(NCInvalidContractId, match='a contract cannot call itself'): + self.runner.call_public_method(self.nc1_id, 'dec', ctx, fail_on_zero=True) + + def test_call_itself_view(self) -> None: 
+ ctx = Context([], self.tx, MOCK_ADDRESS, timestamp=0) + self.runner.create_contract(self.nc1_id, self.blueprint_id, ctx, 10) + + with pytest.raises(NCInvalidContractId, match='a contract cannot call itself'): + self.runner.call_view_method(self.nc1_id, 'invalid_call_view_itself') + + def test_call_initialize(self) -> None: + ctx = Context([], self.tx, MOCK_ADDRESS, timestamp=0) + self.runner.create_contract(self.nc1_id, self.blueprint_id, ctx, 10) + self.runner.create_contract(self.nc2_id, self.blueprint_id, ctx, 10) + self.runner.call_public_method(self.nc1_id, 'set_contract', ctx, self.nc2_id) + + with self.assertRaises(NCInvalidInitializeMethodCall): + self.runner.call_public_method(self.nc1_id, 'invalid_call_initialize', ctx) + + def test_call_public_from_view(self) -> None: + ctx = Context([], self.tx, MOCK_ADDRESS, timestamp=0) + self.runner.create_contract(self.nc1_id, self.blueprint_id, ctx, 10) + self.runner.create_contract(self.nc2_id, self.blueprint_id, ctx, 10) + self.runner.call_public_method(self.nc1_id, 'set_contract', ctx, self.nc2_id) + + with self.assertRaises(NCViewMethodError): + self.runner.call_view_method(self.nc1_id, 'invalid_call_public_from_view') + + def test_call_uninitialize_contract(self) -> None: + ctx = Context([], self.tx, MOCK_ADDRESS, timestamp=0) + self.runner.create_contract(self.nc1_id, self.blueprint_id, ctx, 10) + self.runner.call_public_method(self.nc1_id, 'set_contract', ctx, self.nc2_id) + + with self.assertRaises(NCUninitializedContractError): + self.runner.call_public_method(self.nc1_id, 'dec', ctx, fail_on_zero=True) + + def test_recursion_error(self) -> None: + # Each call to `self.call_public_method()` in the blueprint adds 8 frames to the call stack. + # To trigger an NCRecursionError (instead of Python's built-in RecursionError), + # we need to increase the recursion limit accordingly. 
+ sys.setrecursionlimit(5000) + + ctx = Context([], self.tx, MOCK_ADDRESS, timestamp=0) + self.runner.create_contract(self.nc1_id, self.blueprint_id, ctx, 100_000) + self.runner.create_contract(self.nc2_id, self.blueprint_id, ctx, 100_000) + + self.runner.call_public_method(self.nc1_id, 'set_contract', ctx, self.nc2_id) + self.runner.call_public_method(self.nc2_id, 'set_contract', ctx, self.nc1_id) + + with self.assertRaises(NCRecursionError): + self.runner.call_public_method(self.nc1_id, 'dec', ctx, fail_on_zero=True) + trace = self.runner.get_last_call_info() + assert trace.calls is not None + self.assertEqual(len(trace.calls), self.runner.MAX_RECURSION_DEPTH) + + def test_max_calls_exceeded(self) -> None: + ctx = Context([], self.tx, MOCK_ADDRESS, timestamp=0) + self.runner.create_contract(self.nc1_id, self.blueprint_id, ctx, 0) + self.runner.create_contract(self.nc2_id, self.blueprint_id, ctx, 0) + self.runner.call_public_method(self.nc1_id, 'set_contract', ctx, self.nc2_id) + + with self.assertRaises(NCNumberOfCallsExceeded): + self.runner.call_public_method(self.nc1_id, 'non_stop_call', ctx) + trace = self.runner.get_last_call_info() + assert trace.calls is not None + self.assertEqual(len(trace.calls), self.runner.MAX_CALL_COUNTER) + + def test_getting_funds_from_another_contract(self) -> None: + token1_uid = TokenUid(self._settings.HATHOR_TOKEN_UID) + token2_uid = TokenUid(b'b' * 32) + token3_uid = TokenUid(b'c' * 32) + + actions: list[NCAction] = [ + NCDepositAction(token_uid=token1_uid, amount=11), + NCDepositAction(token_uid=token2_uid, amount=12), + NCDepositAction(token_uid=token3_uid, amount=13), + ] + ctx = Context(actions, self.tx, MOCK_ADDRESS, timestamp=0) + self.runner.create_contract(self.nc1_id, self.blueprint_id, ctx, 0) + self.assertEqual( + Balance(value=11, can_mint=False, can_melt=False), self.runner.get_current_balance(self.nc1_id, token1_uid) + ) + self.assertEqual( + Balance(value=12, can_mint=False, can_melt=False), 
self.runner.get_current_balance(self.nc1_id, token2_uid) + ) + self.assertEqual( + Balance(value=13, can_mint=False, can_melt=False), self.runner.get_current_balance(self.nc1_id, token3_uid) + ) + + actions = [ + NCDepositAction(token_uid=token1_uid, amount=21), + NCDepositAction(token_uid=token2_uid, amount=22), + NCDepositAction(token_uid=token3_uid, amount=23), + ] + ctx = Context(actions, self.tx, MOCK_ADDRESS, timestamp=0) + self.runner.create_contract(self.nc2_id, self.blueprint_id, ctx, 0) + self.assertEqual( + Balance(value=21, can_mint=False, can_melt=False), self.runner.get_current_balance(self.nc2_id, token1_uid) + ) + self.assertEqual( + Balance(value=22, can_mint=False, can_melt=False), self.runner.get_current_balance(self.nc2_id, token2_uid) + ) + self.assertEqual( + Balance(value=23, can_mint=False, can_melt=False), self.runner.get_current_balance(self.nc2_id, token3_uid) + ) + + actions = [ + NCDepositAction(token_uid=token1_uid, amount=31), + NCDepositAction(token_uid=token2_uid, amount=32), + NCDepositAction(token_uid=token3_uid, amount=33), + ] + ctx = Context(actions, self.tx, MOCK_ADDRESS, timestamp=0) + self.runner.create_contract(self.nc3_id, self.blueprint_id, ctx, 0) + self.assertEqual( + Balance(value=31, can_mint=False, can_melt=False), self.runner.get_current_balance(self.nc3_id, token1_uid) + ) + self.assertEqual( + Balance(value=32, can_mint=False, can_melt=False), self.runner.get_current_balance(self.nc3_id, token2_uid) + ) + self.assertEqual( + Balance(value=33, can_mint=False, can_melt=False), self.runner.get_current_balance(self.nc3_id, token3_uid) + ) + + ctx = Context([], self.tx, MOCK_ADDRESS, timestamp=0) + self.runner.call_public_method(self.nc1_id, 'set_contract', ctx, self.nc2_id) + self.runner.call_public_method(self.nc2_id, 'set_contract', ctx, self.nc3_id) + + actions = [ + NCWithdrawalAction(token_uid=token1_uid, amount=7), + NCWithdrawalAction(token_uid=token2_uid, amount=18), + NCWithdrawalAction(token_uid=token3_uid, 
amount=65), + ] + ctx = Context(actions, self.tx, MOCK_ADDRESS, timestamp=0) + self.runner.call_public_method(self.nc1_id, 'get_tokens_from_another_contract', ctx) + + self.assertEqual( + Balance(value=4, can_mint=False, can_melt=False), self.runner.get_current_balance(self.nc1_id, token1_uid) + ) + self.assertEqual( + Balance(value=0, can_mint=False, can_melt=False), self.runner.get_current_balance(self.nc1_id, token2_uid) + ) + self.assertEqual( + Balance(value=0, can_mint=False, can_melt=False), self.runner.get_current_balance(self.nc1_id, token3_uid) + ) + + self.assertEqual( + Balance(value=21, can_mint=False, can_melt=False), self.runner.get_current_balance(self.nc2_id, token1_uid) + ) + self.assertEqual( + Balance(value=16, can_mint=False, can_melt=False), self.runner.get_current_balance(self.nc2_id, token2_uid) + ) + self.assertEqual( + Balance(value=0, can_mint=False, can_melt=False), self.runner.get_current_balance(self.nc2_id, token3_uid) + ) + + self.assertEqual( + Balance(value=31, can_mint=False, can_melt=False), self.runner.get_current_balance(self.nc3_id, token1_uid) + ) + self.assertEqual( + Balance(value=32, can_mint=False, can_melt=False), self.runner.get_current_balance(self.nc3_id, token2_uid) + ) + self.assertEqual( + Balance(value=4, can_mint=False, can_melt=False), self.runner.get_current_balance(self.nc3_id, token3_uid) + ) + + ctx = Context( + [NCWithdrawalAction(token_uid=token1_uid, amount=100)], + self.tx, + MOCK_ADDRESS, + timestamp=0, + ) + with self.assertRaises(NCInsufficientFunds): + self.runner.call_public_method(self.nc1_id, 'get_tokens_from_another_contract', ctx) + + def test_transfer_between_contracts(self) -> None: + ctx = Context([], self.tx, MOCK_ADDRESS, timestamp=0) + self.runner.create_contract(self.nc1_id, self.blueprint_id, ctx, 1) + self.runner.create_contract(self.nc2_id, self.blueprint_id, ctx, 20) + self.runner.create_contract(self.nc3_id, self.blueprint_id, ctx, 300) + + self.runner.call_public_method(self.nc1_id, 
'set_contract', ctx, self.nc2_id) + self.runner.call_public_method(self.nc2_id, 'set_contract', ctx, self.nc3_id) + + total_counter = self.runner.call_view_method(self.nc1_id, 'get_total_counter') + self.assertEqual(total_counter, 21) + + total_counter = self.runner.call_view_method(self.nc2_id, 'get_total_counter') + self.assertEqual(total_counter, 320) + + token1_uid = TokenUid(self._settings.HATHOR_TOKEN_UID) + token2_uid = TokenUid(b'b' * 32) + token3_uid = TokenUid(b'c' * 32) + + actions = [ + NCDepositAction(token_uid=token1_uid, amount=100), + NCDepositAction(token_uid=token2_uid, amount=50), + NCDepositAction(token_uid=token3_uid, amount=25), + ] + ctx = Context(actions, self.tx, MOCK_ADDRESS, timestamp=0) + self.runner.call_public_method(self.nc1_id, 'split_balance', ctx) + + self.assertEqual( + Balance(value=49, can_mint=False, can_melt=False), self.runner.get_current_balance(self.nc1_id, token1_uid) + ) + self.assertEqual( + Balance(value=24, can_mint=False, can_melt=False), self.runner.get_current_balance(self.nc1_id, token2_uid) + ) + self.assertEqual( + Balance(value=12, can_mint=False, can_melt=False), self.runner.get_current_balance(self.nc1_id, token3_uid) + ) + + self.assertEqual( + Balance(value=25, can_mint=False, can_melt=False), self.runner.get_current_balance(self.nc2_id, token1_uid) + ) + self.assertEqual( + Balance(value=12, can_mint=False, can_melt=False), self.runner.get_current_balance(self.nc2_id, token2_uid) + ) + self.assertEqual( + Balance(value=6, can_mint=False, can_melt=False), self.runner.get_current_balance(self.nc2_id, token3_uid) + ) + + self.assertEqual( + Balance(value=26, can_mint=False, can_melt=False), self.runner.get_current_balance(self.nc3_id, token1_uid) + ) + self.assertEqual( + Balance(value=14, can_mint=False, can_melt=False), self.runner.get_current_balance(self.nc3_id, token2_uid) + ) + self.assertEqual( + Balance(value=7, can_mint=False, can_melt=False), self.runner.get_current_balance(self.nc3_id, token3_uid) + 
) + + def test_loop(self) -> None: + ctx = Context([], self.tx, MOCK_ADDRESS, timestamp=0) + self.runner.create_contract(self.nc1_id, self.blueprint_id, ctx, 8) + self.runner.create_contract(self.nc2_id, self.blueprint_id, ctx, 3) + self.runner.create_contract(self.nc3_id, self.blueprint_id, ctx, 6) + + self.runner.call_public_method(self.nc1_id, 'set_contract', ctx, self.nc2_id) + self.runner.call_public_method(self.nc2_id, 'set_contract', ctx, self.nc3_id) + self.runner.call_public_method(self.nc3_id, 'set_contract', ctx, self.nc1_id) + + storage1 = self.runner.get_storage(self.nc1_id) + self.assertEqual(storage1.get_obj(b'counter', COUNTER_NC_TYPE), 8) + self.assertEqual(storage1.get_obj(b'contract', CONTRACT_NC_TYPE), self.nc2_id) + + storage2 = self.runner.get_storage(self.nc2_id) + self.assertEqual(storage2.get_obj(b'counter', COUNTER_NC_TYPE), 3) + self.assertEqual(storage2.get_obj(b'contract', CONTRACT_NC_TYPE), self.nc3_id) + + storage3 = self.runner.get_storage(self.nc3_id) + self.assertEqual(storage3.get_obj(b'counter', COUNTER_NC_TYPE), 6) + self.assertEqual(storage3.get_obj(b'contract', CONTRACT_NC_TYPE), self.nc1_id) + + self.runner.call_public_method(self.nc1_id, 'dec', ctx, fail_on_zero=False) + self.assertEqual(storage1.get_obj(b'counter', COUNTER_NC_TYPE), 4) + self.assertEqual(storage2.get_obj(b'counter', COUNTER_NC_TYPE), 0) + self.assertEqual(storage3.get_obj(b'counter', COUNTER_NC_TYPE), 3) + + def test_call_view_after_public(self) -> None: + ctx = Context([], self.tx, MOCK_ADDRESS, timestamp=0) + self.runner.create_contract(self.nc1_id, self.blueprint_id, ctx, 8) + self.runner.create_contract(self.nc2_id, self.blueprint_id, ctx, 3) + + self.runner.call_public_method(self.nc1_id, 'set_contract', ctx, self.nc2_id) + + storage1 = self.runner.get_storage(self.nc1_id) + self.assertEqual(storage1.get_obj(b'counter', COUNTER_NC_TYPE), 8) + self.assertEqual(storage1.get_obj(b'contract', CONTRACT_NC_TYPE), self.nc2_id) + + storage2 = 
self.runner.get_storage(self.nc2_id) + self.assertEqual(storage2.get_obj(b'counter', COUNTER_NC_TYPE), 3) + self.assertEqual(storage2.get_obj(b'contract', CONTRACT_NC_TYPE), None) + + result = self.runner.call_public_method(self.nc1_id, 'dec_and_get_counter', ctx) + self.assertEqual(storage1.get_obj(b'counter', COUNTER_NC_TYPE), 7) + self.assertEqual(storage2.get_obj(b'counter', COUNTER_NC_TYPE), 2) + self.assertEqual(result, 9) diff --git a/tests/nanocontracts/test_consensus.py b/tests/nanocontracts/test_consensus.py new file mode 100644 index 000000000..eb87a0ae1 --- /dev/null +++ b/tests/nanocontracts/test_consensus.py @@ -0,0 +1,1419 @@ +from typing import Any, cast + +from hathor.conf import HathorSettings +from hathor.crypto.util import get_address_from_public_key_bytes +from hathor.exception import InvalidNewTransaction +from hathor.nanocontracts import NC_EXECUTION_FAIL_ID, Blueprint, Context, public +from hathor.nanocontracts.catalog import NCBlueprintCatalog +from hathor.nanocontracts.exception import NCFail, NCInvalidSignature +from hathor.nanocontracts.method import Method +from hathor.nanocontracts.nc_types import make_nc_type_for_arg_type as make_nc_type +from hathor.nanocontracts.storage.contract_storage import Balance +from hathor.nanocontracts.types import NCAction, NCActionType, NCDepositAction, NCWithdrawalAction, TokenUid +from hathor.nanocontracts.utils import sign_pycoin +from hathor.simulator.trigger import StopAfterMinimumBalance, StopAfterNMinedBlocks +from hathor.simulator.utils import add_new_blocks +from hathor.transaction import BaseTransaction, Block, Transaction, TxOutput +from hathor.transaction.headers import NanoHeader +from hathor.transaction.headers.nano_header import NanoHeaderAction +from hathor.transaction.nc_execution_state import NCExecutionState +from hathor.types import VertexId +from hathor.wallet.base_wallet import WalletOutputInfo +from tests.dag_builder.builder import TestDAGBuilder +from tests.simulation.base import 
SimulatorTestCase +from tests.utils import add_blocks_unlock_reward, add_custom_tx, create_tokens, gen_custom_base_tx + +settings = HathorSettings() + +INT_NC_TYPE = make_nc_type(int) +TOKEN_NC_TYPE = make_nc_type(TokenUid) + + +class MyBlueprint(Blueprint): + total: int + token_uid: TokenUid + counter: int + + @public + def initialize(self, ctx: Context, token_uid: TokenUid) -> None: + self.total = 0 + self.counter = 0 + self.token_uid = token_uid + + def _get_action(self, ctx: Context) -> NCAction: + if len(ctx.actions) != 1: + raise NCFail('only one token allowed') + if self.token_uid not in ctx.actions: + raise NCFail('invalid token') + action = ctx.get_single_action(self.token_uid) + if action.token_uid != self.token_uid: + raise NCFail('invalid token') + return action + + @public + def nop(self, ctx: Context, a: int) -> None: + self.counter += 1 + + @public(allow_deposit=True) + def deposit(self, ctx: Context) -> None: + self.counter += 1 + action = self._get_action(ctx) + assert isinstance(action, NCDepositAction) + self.total += action.amount + + @public(allow_withdrawal=True) + def withdraw(self, ctx: Context) -> None: + self.counter += 1 + action = self._get_action(ctx) + assert isinstance(action, NCWithdrawalAction) + self.total -= action.amount + + @public + def fail_on_zero(self, ctx: Context) -> None: + if self.counter == 0: + raise NCFail('counter is zero') + + +class NCConsensusTestCase(SimulatorTestCase): + __test__ = True + + def setUp(self): + super().setUp() + + self.myblueprint_id = b'x' * 32 + self.catalog = NCBlueprintCatalog({ + self.myblueprint_id: MyBlueprint + }) + self.nc_seqnum = 0 + + self.manager = self.simulator.create_peer() + self.manager.allow_mining_without_peers() + self.manager.tx_storage.nc_catalog = self.catalog + + self.wallet = self.manager.wallet + + self.miner = self.simulator.create_miner(self.manager, hashpower=100e6) + self.miner.start() + + self.token_uid = TokenUid(b'\0') + trigger = 
StopAfterMinimumBalance(self.wallet, self.token_uid, 1) + self.assertTrue(self.simulator.run(7200, trigger=trigger)) + + def assertNoBlocksVoided(self): + for blk in self.manager.tx_storage.get_all_transactions(): + if not blk.is_block: + continue + meta = blk.get_metadata() + self.assertIsNone(meta.voided_by) + + def _gen_nc_tx( + self, + nc_id: VertexId, + nc_method: str, + nc_args: list[Any], + nc: BaseTransaction | None = None, + *, + address: str | None = None, + nc_actions: list[NanoHeaderAction] | None = None, + is_custom_token: bool = False, + ) -> Transaction: + method_parser = Method.from_callable(getattr(MyBlueprint, nc_method)) + + if nc is None: + nc = Transaction(timestamp=int(self.manager.reactor.seconds())) + assert isinstance(nc, Transaction) + + nc_args_bytes = method_parser.serialize_args_bytes(nc_args) + + if address is None: + address = self.wallet.get_unused_address() + privkey = self.wallet.get_private_key(address) + + nano_header = NanoHeader( + tx=nc, + nc_seqnum=self.nc_seqnum, + nc_id=nc_id, + nc_method=nc_method, + nc_args_bytes=nc_args_bytes, + nc_address=b'', + nc_script=b'', + nc_actions=nc_actions or [], + ) + nc.headers.append(nano_header) + self.nc_seqnum += 1 + + if is_custom_token: + nc.tokens = [self.token_uid] + + sign_pycoin(nano_header, privkey) + self._finish_preparing_tx(nc) + self.manager.reactor.advance(10) + return nc + + def _finish_preparing_tx(self, tx: Transaction, *, set_timestamp: bool = True) -> Transaction: + if set_timestamp: + tx.timestamp = int(self.manager.reactor.seconds()) + tx.parents = self.manager.get_new_tx_parents() + tx.weight = self.manager.daa.minimum_tx_weight(tx) + return tx + + def _run_invalid_signature(self, attr, value, cause=NCInvalidSignature): + nc = self._gen_nc_tx(self.myblueprint_id, 'initialize', [self.token_uid]) + self.manager.cpu_mining_service.resolve(nc) + self.manager.on_new_tx(nc) + self.assertIsNone(nc.get_metadata().voided_by) + + tx = self._gen_nc_tx(nc.hash, 'deposit', []) + 
nano_header = tx.get_nano_header() + self.assertNotEqual(getattr(nano_header, attr), value) + setattr(nano_header, attr, value) + tx.weight = self.manager.daa.minimum_tx_weight(tx) + self.manager.cpu_mining_service.resolve(tx) + + tx.clear_sighash_cache() + with self.assertRaises(InvalidNewTransaction) as cm: + self.manager.on_new_tx(tx) + exc = cm.exception + self.assertIsInstance(exc.__cause__, cause) + + def test_nc_consensus_invalid_signature_change_nc_method(self): + self._run_invalid_signature('nc_method', 'withdraw') + + def test_nc_consensus_invalid_signature_change_nc_id(self): + self._run_invalid_signature('nc_id', b'y' * 32) + + def test_nc_consensus_invalid_signature_change_nc_args_bytes(self): + self._run_invalid_signature('nc_args_bytes', b'x') + + def test_nc_consensus_invalid_signature_change_nc_address_1(self): + self._run_invalid_signature('nc_address', b'x', cause=NCInvalidSignature) + + def test_nc_consensus_invalid_signature_change_nc_address_2(self): + privkey = self.wallet.get_key_at_index(100) + pubkey_bytes = privkey.sec() + address = get_address_from_public_key_bytes(pubkey_bytes) + self._run_invalid_signature('nc_address', address) + + def test_nc_consensus_execution_fails(self): + nc = self._gen_nc_tx(self.myblueprint_id, 'initialize', [self.token_uid]) + self.manager.cpu_mining_service.resolve(nc) + self.manager.on_new_tx(nc) + self.assertIsNone(nc.get_metadata().voided_by) + + tx = self._gen_nc_tx(nc.hash, 'deposit', []) + self.manager.cpu_mining_service.resolve(tx) + self.manager.on_new_tx(tx) + self.assertIsNone(tx.get_metadata().voided_by) + + trigger = StopAfterNMinedBlocks(self.miner, quantity=2) + self.assertTrue(self.simulator.run(7200, trigger=trigger)) + + meta = tx.get_metadata() + self.assertIsNotNone(meta.first_block) + self.assertEqual(meta.voided_by, {tx.hash, NC_EXECUTION_FAIL_ID}) + + # add another block that confirms tx + self._add_new_block(tx_parents=[ + tx.hash, + tx.parents[0], + ]) + + self.assertNoBlocksVoided() 
+ + def test_nc_consensus_success_custom_token(self) -> None: + token_creation_tx = create_tokens(self.manager, mint_amount=100, use_genesis=False, propagate=False) + self._finish_preparing_tx(token_creation_tx, set_timestamp=False) + self.manager.cpu_mining_service.resolve(token_creation_tx) + self.manager.on_new_tx(token_creation_tx) + + self.token_uid = token_creation_tx.hash + self.test_nc_consensus_success(is_custom_token=True) + + def test_nc_consensus_success(self, *, is_custom_token: bool = False) -> None: + nc = self._gen_nc_tx(self.myblueprint_id, 'initialize', [self.token_uid]) + self.manager.cpu_mining_service.resolve(nc) + self.manager.on_new_tx(nc) + self.assertIsNone(nc.get_metadata().voided_by) + + nc_id = nc.hash + + trigger = StopAfterNMinedBlocks(self.miner, quantity=2) + self.assertTrue(self.simulator.run(14400, trigger=trigger)) + nc_loaded = self.manager.tx_storage.get_transaction(nc_id) + nc_loaded_meta = nc_loaded.get_metadata() + self.assertIsNotNone(nc_loaded_meta.first_block) + self.assertIsNone(nc_loaded_meta.voided_by) + + block_initialize = self.manager.tx_storage.get_best_block() + + nc_storage = self.manager.get_best_block_nc_storage(nc_id) + self.assertEqual(nc_storage.get_obj(b'token_uid', TOKEN_NC_TYPE), self.token_uid) + + # Make a deposit. 
+ + add_blocks_unlock_reward(self.manager) + _inputs, deposit_amount = self.wallet.get_inputs_from_amount( + 1, self.manager.tx_storage, token_uid=self.token_uid + ) + tx = self.wallet.prepare_transaction(Transaction, _inputs, [], timestamp=int(self.manager.reactor.seconds())) + tx = self._gen_nc_tx(nc_id, 'deposit', [], nc=tx, is_custom_token=is_custom_token, nc_actions=[ + NanoHeaderAction( + type=NCActionType.DEPOSIT, + token_index=1 if is_custom_token else 0, + amount=deposit_amount, + ) + ]) + self.manager.cpu_mining_service.resolve(tx) + self.manager.on_new_tx(tx) + self.assertIsNone(tx.get_metadata().voided_by) + + add_new_blocks(self.manager, 2, advance_clock=1) + + meta = tx.get_metadata() + self.assertIsNotNone(meta.first_block) + self.assertIsNone(meta.voided_by) + + block_deposit = self.manager.tx_storage.get_best_block() + + nc_storage = self.manager.get_best_block_nc_storage(nc_id) + self.assertEqual( + Balance(value=deposit_amount, can_mint=False, can_melt=False), + nc_storage.get_balance(self.token_uid) + ) + + # Make a withdrawal of 1 HTR. 
+ + _output_token_index = 0 + _tokens = [] + if is_custom_token: + _tokens.append(self.token_uid) + _output_token_index = 1 + + tx2 = Transaction( + outputs=[TxOutput(1, b'', _output_token_index)], + timestamp=int(self.manager.reactor.seconds()), + ) + tx2.tokens = _tokens + tx2 = self._gen_nc_tx(nc_id, 'withdraw', [], nc=tx2, nc_actions=[ + NanoHeaderAction( + type=NCActionType.WITHDRAWAL, + token_index=1 if is_custom_token else 0, + amount=1, + ) + ]) + self.manager.cpu_mining_service.resolve(tx2) + self.manager.on_new_tx(tx2) + self.assertIsNone(tx2.get_metadata().voided_by) + + add_new_blocks(self.manager, 2, advance_clock=1) + + meta2 = tx2.get_metadata() + self.assertIsNotNone(meta2.first_block) + self.assertIsNone(meta2.voided_by) + + nc_storage = self.manager.get_best_block_nc_storage(nc_id) + self.assertEqual( + Balance(value=deposit_amount - 1, can_mint=False, can_melt=False), + nc_storage.get_balance(self.token_uid) + ) + + # Make a withdrawal of the remainder. + + tx3 = Transaction( + outputs=[TxOutput(deposit_amount - 2, b'', _output_token_index)], + timestamp=int(self.manager.reactor.seconds()), + ) + tx3.tokens = _tokens + tx3 = self._gen_nc_tx(nc_id, 'withdraw', [], nc=tx3, nc_actions=[ + NanoHeaderAction( + type=NCActionType.WITHDRAWAL, + token_index=1 if is_custom_token else 0, + amount=deposit_amount - 2, + ) + ]) + self.manager.cpu_mining_service.resolve(tx3) + self.manager.on_new_tx(tx3) + self.assertIsNone(tx3.get_metadata().voided_by) + + add_new_blocks(self.manager, 2, advance_clock=1) + + meta3 = tx3.get_metadata() + self.assertIsNotNone(meta3.first_block) + self.assertIsNone(meta3.voided_by) + + nc_storage = self.manager.get_best_block_nc_storage(nc_id) + self.assertEqual(Balance(value=1, can_mint=False, can_melt=False), nc_storage.get_balance(self.token_uid)) + + # Try to withdraw more than available, so it fails. 
+ + _output_token_index = 0 + _tokens = [] + if is_custom_token: + _tokens.append(self.token_uid) + _output_token_index = 1 + + tx4 = Transaction( + outputs=[TxOutput(2, b'', _output_token_index)], + timestamp=int(self.manager.reactor.seconds()), + ) + tx4.tokens = _tokens + tx4 = self._gen_nc_tx(nc_id, 'withdraw', [], nc=tx4, nc_actions=[ + NanoHeaderAction( + type=NCActionType.WITHDRAWAL, + token_index=1 if is_custom_token else 0, + amount=2, + ) + ]) + self.manager.cpu_mining_service.resolve(tx4) + self.manager.on_new_tx(tx4) + self.assertIsNone(tx4.get_metadata().voided_by) + + add_new_blocks(self.manager, 2, advance_clock=1) + + meta4 = tx4.get_metadata() + self.assertIsNotNone(meta4.first_block) + self.assertEqual(meta4.voided_by, {tx4.hash, NC_EXECUTION_FAIL_ID}) + + nc_storage = self.manager.get_best_block_nc_storage(nc_id) + self.assertEqual(Balance(value=1, can_mint=False, can_melt=False), nc_storage.get_balance(self.token_uid)) + + self.assertNoBlocksVoided() + + # Check balance at different blocks + + nc_storage = self.manager.get_nc_storage(block_initialize, nc_id) + self.assertEqual(Balance(value=0, can_mint=False, can_melt=False), nc_storage.get_balance(self.token_uid)) + + nc_storage = self.manager.get_nc_storage(block_deposit, nc_id) + self.assertEqual( + Balance(value=deposit_amount, can_mint=False, can_melt=False), + nc_storage.get_balance(self.token_uid) + ) + + def test_nc_consensus_failure_voided_by_propagation(self): + nc = self._gen_nc_tx(self.myblueprint_id, 'initialize', [self.token_uid]) + self.manager.cpu_mining_service.resolve(nc) + self.manager.on_new_tx(nc) + self.assertIsNone(nc.get_metadata().voided_by) + + # Find some blocks. + self.assertTrue(self.simulator.run(600)) + + # tx1 is a NanoContract transaction and will fail execution. 
+ address = self.wallet.get_unused_address_bytes() + _outputs = [ + WalletOutputInfo(address, 1, None), + WalletOutputInfo(address, 1, None), + ] + tx1 = self.wallet.prepare_transaction_compute_inputs(Transaction, _outputs, self.manager.tx_storage) + tx1 = self._gen_nc_tx(nc.hash, 'deposit', [], nc=tx1) + self.manager.cpu_mining_service.resolve(tx1) + self.manager.on_new_tx(tx1) + self.assertIsNone(tx1.get_metadata().voided_by) + + # add tx21 spending tx1 in mempool before tx1 has been executed + tx21 = add_custom_tx(self.manager, tx_inputs=[(tx1, 0)]) + tx21_meta = tx21.get_metadata() + self.assertIsNone(tx21_meta.voided_by) + + # add tx22 with tx1 as parent in mempool before tx1 has been executed + address = self.wallet.get_unused_address_bytes() + _outputs = [ + WalletOutputInfo(address, 1, None), + ] + tx22 = self.wallet.prepare_transaction_compute_inputs(Transaction, _outputs, self.manager.tx_storage) + self._finish_preparing_tx(tx22) + tx22.parents[0] = tx1.hash + self.manager.cpu_mining_service.resolve(tx22) + self.manager.on_new_tx(tx22) + tx22_meta = tx22.get_metadata() + self.assertIsNone(tx22_meta.voided_by) + + # executes tx1 and asserts the final state + trigger = StopAfterNMinedBlocks(self.miner, quantity=2) + self.assertTrue(self.simulator.run(7200, trigger=trigger)) + + # confirm that tx1 failed execution. + meta = tx1.get_metadata() + self.assertIsNotNone(meta.first_block) + self.assertEqual(meta.voided_by, {NC_EXECUTION_FAIL_ID, tx1.hash}) + + # tx21 must be voided because it spends an input from tx and tx failed execution. 
+ self.assertEqual(tx21_meta.voided_by, {tx1.hash}) + + # tx22 will not be voided because it just verifies tx1 + tx22_meta = tx22.get_metadata() + self.assertIsNone(tx22_meta.voided_by) + + # add tx31 spending tx1 in mempool after tx1 has been executed + tx31 = add_custom_tx(self.manager, tx_inputs=[(tx1, 1)]) + tx31_meta = tx31.get_metadata() + self.assertEqual(tx31_meta.voided_by, {tx1.hash}) + + # add tx32 spending tx22 in mempool after tx1 has been executed + tx32 = add_custom_tx(self.manager, tx_inputs=[(tx22, 0)]) + self.assertIn(tx1.hash, tx32.parents) + tx32_meta = tx32.get_metadata() + self.assertIsNone(tx32_meta.voided_by) + + # add tx33 in mempool, it spends tx1 with conflict after tx1 has been executed + tx33 = add_custom_tx(self.manager, tx_inputs=[(tx1, 0)]) + tx33_meta = tx33.get_metadata() + self.assertEqual(tx33_meta.voided_by, {tx1.hash, tx33.hash}) + + # confirm that tx1 inputs are unspent (i.e., they are still UTXOs). + tx1in = tx1.inputs[0] + tx1_spent_tx = self.manager.tx_storage.get_transaction(tx1in.tx_id) + tx1_spent_idx = tx1in.index + tx34 = add_custom_tx(self.manager, tx_inputs=[(tx1_spent_tx, tx1_spent_idx)]) + tx34_meta = tx34.get_metadata() + self.assertIsNone(tx34_meta.voided_by) + + self.assertNoBlocksVoided() + + def test_nc_consensus_chain_fail(self): + nc = self._gen_nc_tx(self.myblueprint_id, 'initialize', [self.token_uid]) + self.manager.cpu_mining_service.resolve(nc) + self.manager.on_new_tx(nc) + self.assertIsNone(nc.get_metadata().voided_by) + + # Find some blocks. + self.assertTrue(self.simulator.run(600)) + + # tx1 is a NanoContract transaction and will fail execution. 
+ address = self.wallet.get_unused_address_bytes() + _outputs = [ + WalletOutputInfo(address, 1, None), + WalletOutputInfo(address, 1, None), + ] + tx1 = self.wallet.prepare_transaction_compute_inputs(Transaction, _outputs, self.manager.tx_storage) + tx1 = self._gen_nc_tx(nc.hash, 'deposit', [], nc=tx1) + self.manager.cpu_mining_service.resolve(tx1) + + # tx2 is a NanoContract transaction independent of tx1 + tx2 = self._gen_nc_tx(nc.hash, 'nop', [1]) + self.manager.cpu_mining_service.resolve(tx2) + + # propagate both tx1 and tx2 + self.assertTrue(self.manager.on_new_tx(tx1)) + self.assertTrue(self.manager.on_new_tx(tx2)) + + # tx3 is a NanoContract transaction that has tx1 as parent + tx3 = self._gen_nc_tx(nc.hash, 'nop', [1]) + if tx1.hash not in tx3.parents: + tx3.parents[0] = tx1.hash + tx3.timestamp += 1 + self.manager.cpu_mining_service.resolve(tx3) + self.assertTrue(self.manager.on_new_tx(tx3)) + + # tx4 is a NanoContract transaction that spents tx1 output. + tx4 = gen_custom_base_tx(self.manager, tx_inputs=[(tx1, 0)]) + self._gen_nc_tx(nc.hash, 'nop', [1], nc=tx4) + tx4.timestamp += 2 + # self.assertNotIn(tx1.hash, tx4.parents) + self.manager.cpu_mining_service.resolve(tx4) + self.assertTrue(self.manager.on_new_tx(tx4)) + + # tx5 is a NanoContract transaction that spents tx4 output. + tx5 = gen_custom_base_tx(self.manager, tx_inputs=[(tx4, 0)]) + self._gen_nc_tx(nc.hash, 'nop', [1], nc=tx5) + tx5.timestamp += 3 + # self.assertNotIn(tx1.hash, tx5.parents) + self.manager.cpu_mining_service.resolve(tx5) + self.assertTrue(self.manager.on_new_tx(tx5)) + + # execute all transactions. 
+ trigger = StopAfterNMinedBlocks(self.miner, quantity=2) + self.assertTrue(self.simulator.run(7200, trigger=trigger)) + + # assert state after execution (tx1 fails, tx2 executes) + self.assertEqual(tx1.get_metadata().voided_by, {tx1.hash, NC_EXECUTION_FAIL_ID}) + self.assertIsNone(tx2.get_metadata().voided_by) + self.assertIsNone(tx3.get_metadata().voided_by) + self.assertEqual(tx4.get_metadata().voided_by, {tx1.hash}) + self.assertEqual(tx5.get_metadata().voided_by, {tx1.hash}) + + nc_storage = self.manager.get_best_block_nc_storage(nc.hash) + self.assertEqual(2, nc_storage.get_obj(b'counter', INT_NC_TYPE)) + + def _add_new_block(self, + *, + parents: list[VertexId] | None = None, + tx_parents: list[VertexId] | None = None, + parent_block_hash: VertexId | None = None) -> Block: + if parents: + assert len(parents) == 3 + assert parent_block_hash is None + assert tx_parents is None + parent_block_hash = parents[0] + tx_parents = parents[1:] + block = self.manager.generate_mining_block(parent_block_hash=parent_block_hash) + if tx_parents is not None: + assert len(tx_parents) == 2 + block.parents[1] = tx_parents[0] + block.parents[2] = tx_parents[1] + self.manager.cpu_mining_service.resolve(block) + self.manager.propagate_tx(block) + return block + + def test_nc_consensus_reorg(self): + nc = self._gen_nc_tx(self.myblueprint_id, 'initialize', [self.token_uid]) + self.manager.cpu_mining_service.resolve(nc) + self.manager.on_new_tx(nc) + self.assertIsNone(nc.get_metadata().voided_by) + + nc_id = nc.hash + + # Find some blocks. + self.assertTrue(self.simulator.run(600)) + + # Generate two addresses. + address1 = self.wallet.get_address(self.wallet.get_key_at_index(0)) + address2 = self.wallet.get_address(self.wallet.get_key_at_index(1)) + self.assertNotEqual(address1, address2) + + # Prepare three sibling transactions. 
+ _inputs, deposit_amount_1 = self.wallet.get_inputs_from_amount(6500, self.manager.tx_storage) + tx1 = self.wallet.prepare_transaction(Transaction, _inputs, []) + tx1 = self._gen_nc_tx(nc_id, 'deposit', [], nc=tx1, address=address1, nc_actions=[ + NanoHeaderAction( + type=NCActionType.DEPOSIT, + token_index=0, + amount=deposit_amount_1, + ) + ]) + self.manager.cpu_mining_service.resolve(tx1) + + self.manager.reactor.advance(10) + + withdrawal_amount_1 = deposit_amount_1 - 100 + tx11 = Transaction(outputs=[TxOutput(withdrawal_amount_1, b'', 0)]) + tx11 = self._gen_nc_tx(nc_id, 'withdraw', [], nc=tx11, address=address1, nc_actions=[ + NanoHeaderAction( + type=NCActionType.WITHDRAWAL, + token_index=0, + amount=withdrawal_amount_1, + ) + ]) + tx11.weight += 1 + self.manager.cpu_mining_service.resolve(tx11) + + self.manager.reactor.advance(10) + + _inputs, deposit_amount_2 = self.wallet.get_inputs_from_amount(3, self.manager.tx_storage) + tx2 = self.wallet.prepare_transaction(Transaction, _inputs, []) + tx2 = self._gen_nc_tx(nc_id, 'deposit', [], nc=tx2, address=address2, nc_actions=[ + NanoHeaderAction( + type=NCActionType.DEPOSIT, + token_index=0, + amount=deposit_amount_2, + ) + ]) + tx2.weight += 1 + self.manager.cpu_mining_service.resolve(tx2) + + self.assertGreater(deposit_amount_1, deposit_amount_2) + self.assertGreater(withdrawal_amount_1, deposit_amount_2) + + # Propagate tx1, tx2, and tx11. + self.manager.on_new_tx(tx1) + self.manager.on_new_tx(tx2) + self.manager.on_new_tx(tx11) + + # Add a block that executes tx1 and tx11 (but not tx2). 
+ blk10 = self._add_new_block(tx_parents=[ + tx1.hash, + tx1.parents[0], + ]) + blk_base_hash = blk10.parents[0] + + blk11 = self._add_new_block(tx_parents=[ + tx1.hash, + tx11.hash, + ]) + + self.assertEqual(tx1.get_metadata().first_block, blk10.hash) + self.assertIsNone(tx2.get_metadata().first_block) + self.assertEqual(tx11.get_metadata().first_block, blk11.hash) + + self.assertIsNone(tx1.get_metadata().voided_by) + self.assertIsNone(tx2.get_metadata().voided_by) + self.assertIsNone(tx11.get_metadata().voided_by) + + nc_storage = self.manager.get_best_block_nc_storage(nc_id) + self.assertEqual( + Balance(value=deposit_amount_1 - withdrawal_amount_1, can_mint=False, can_melt=False), + nc_storage.get_balance(self.token_uid) + ) + + # Cause a reorg that will execute tx2 and tx11 (but not tx1). + blk20 = self._add_new_block(parents=[ + blk_base_hash, + tx2.hash, + tx2.parents[0], + ]) + blk21 = self._add_new_block(parents=[ + blk20.hash, + tx2.hash, + tx11.hash, + ]) + self._add_new_block(parents=[ + blk21.hash, + blk21.parents[1], + blk21.parents[2], + ]) + + self.assertIsNone(tx1.get_metadata().first_block) + self.assertEqual(tx2.get_metadata().first_block, blk20.hash) + self.assertEqual(tx11.get_metadata().first_block, blk21.hash) + + self.assertIsNone(tx1.get_metadata().voided_by) + self.assertIsNone(tx2.get_metadata().voided_by) + self.assertEqual(tx11.get_metadata().voided_by, {tx11.hash, NC_EXECUTION_FAIL_ID}) + + nc_storage = self.manager.get_best_block_nc_storage(nc_id) + self.assertEqual( + Balance(value=deposit_amount_2, can_mint=False, can_melt=False), + nc_storage.get_balance(self.token_uid) + ) + + def test_nc_consensus_reorg_fail_before_reorg(self): + nc = self._gen_nc_tx(self.myblueprint_id, 'initialize', [self.token_uid]) + self.manager.cpu_mining_service.resolve(nc) + self.manager.on_new_tx(nc) + self.assertIsNone(nc.get_metadata().voided_by) + + nc_id = nc.hash + + # Find some blocks. 
+ self.assertTrue(self.simulator.run(600)) + + # Generate two addresses. + address1 = self.wallet.get_address(self.wallet.get_key_at_index(0)) + address2 = self.wallet.get_address(self.wallet.get_key_at_index(1)) + self.assertNotEqual(address1, address2) + + # Prepare three sibling transactions. + _inputs, deposit_amount_2 = self.wallet.get_inputs_from_amount(6500, self.manager.tx_storage) + tx2 = self.wallet.prepare_transaction(Transaction, _inputs, []) + tx2 = self._gen_nc_tx(nc_id, 'deposit', [], nc=tx2, address=address2, nc_actions=[ + NanoHeaderAction( + type=NCActionType.DEPOSIT, + token_index=0, + amount=deposit_amount_2, + ) + ]) + self.manager.cpu_mining_service.resolve(tx2) + + self.manager.reactor.advance(10) + + withdrawal_amount_1 = deposit_amount_2 - 100 + tx11 = Transaction(outputs=[TxOutput(withdrawal_amount_1, b'', 0)]) + tx11 = self._gen_nc_tx(nc_id, 'withdraw', [], nc=tx11, address=address1, nc_actions=[ + NanoHeaderAction( + type=NCActionType.WITHDRAWAL, + token_index=0, + amount=withdrawal_amount_1, + ) + ]) + tx11.weight += 1 + self.manager.cpu_mining_service.resolve(tx11) + + self.manager.reactor.advance(10) + + _inputs, deposit_amount_1 = self.wallet.get_inputs_from_amount(1, self.manager.tx_storage) + tx1 = self.wallet.prepare_transaction(Transaction, _inputs, []) + tx1 = self._gen_nc_tx(nc_id, 'deposit', [], nc=tx1, address=address1, nc_actions=[ + NanoHeaderAction( + type=NCActionType.DEPOSIT, + token_index=0, + amount=deposit_amount_1, + ) + ]) + tx1.weight += 2 + self.manager.cpu_mining_service.resolve(tx1) + + self.assertGreater(deposit_amount_2, deposit_amount_1) + self.assertGreater(withdrawal_amount_1, deposit_amount_1) + + # Propagate tx1, tx2, and tx11. + self.manager.on_new_tx(tx1) + self.manager.on_new_tx(tx2) + self.manager.on_new_tx(tx11) + + # Add a block that executes tx1 and tx11 (but not tx2). 
+ blk10 = self._add_new_block(tx_parents=[ + tx1.hash, + tx11.hash, + ]) + blk_base_hash = blk10.parents[0] + + self.assertEqual(tx1.get_metadata().first_block, blk10.hash) + self.assertIsNone(tx2.get_metadata().first_block) + self.assertEqual(tx11.get_metadata().first_block, blk10.hash) + + self.assertIsNone(tx1.get_metadata().voided_by) + self.assertIsNone(tx2.get_metadata().voided_by) + self.assertEqual(tx11.get_metadata().voided_by, {tx11.hash, NC_EXECUTION_FAIL_ID}) + + nc_storage = self.manager.get_best_block_nc_storage(nc_id) + self.assertEqual( + Balance(value=deposit_amount_1, can_mint=False, can_melt=False), + nc_storage.get_balance(self.token_uid) + ) + + # Cause a reorg that will execute tx2 and tx11 (but not tx1). + blk20 = self._add_new_block(parents=[ + blk_base_hash, + tx2.hash, + tx2.parents[0], + ]) + blk21 = self._add_new_block(parents=[ + blk20.hash, + tx2.hash, + tx11.hash, + ]) + + self.assertIsNone(tx1.get_metadata().first_block) + self.assertEqual(tx2.get_metadata().first_block, blk20.hash) + self.assertEqual(tx11.get_metadata().first_block, blk21.hash) + + self.assertIsNone(tx1.get_metadata().voided_by) + self.assertIsNone(tx2.get_metadata().voided_by) + self.assertIsNone(tx11.get_metadata().voided_by) + + nc_storage = self.manager.get_best_block_nc_storage(nc_id) + self.assertEqual( + Balance(value=deposit_amount_2 - withdrawal_amount_1, can_mint=False, can_melt=False), + nc_storage.get_balance(self.token_uid) + ) + + def _prepare_nc_consensus_conflict(self, *, conflict_with_nano: bool) -> tuple[Transaction, ...]: + nc = self._gen_nc_tx(self.myblueprint_id, 'initialize', [self.token_uid]) + self.manager.cpu_mining_service.resolve(nc) + self.manager.on_new_tx(nc) + self.assertIsNone(nc.get_metadata().voided_by) + + # Find some blocks. 
+ self.assertTrue(self.simulator.run(600)) + + # tx0 is a regular transaction with one output + address = self.wallet.get_unused_address_bytes() + _outputs = [ + WalletOutputInfo(address, 10, None), + ] + tx0 = self.wallet.prepare_transaction_compute_inputs(Transaction, _outputs, self.manager.tx_storage) + self._finish_preparing_tx(tx0) + self.manager.cpu_mining_service.resolve(tx0) + self.manager.reactor.advance(60) + + # tx1 is a NanoContract transaction and will fail execution. + tx1 = gen_custom_base_tx(self.manager, tx_inputs=[(tx0, 0)]) + self.assertEqual(len(tx1.outputs), 1) + tx1.outputs[0].value = 3 + tx1 = self._gen_nc_tx(nc.hash, 'deposit', [], nc=tx1, nc_actions=[ + NanoHeaderAction( + type=NCActionType.DEPOSIT, + token_index=0, + amount=tx0.outputs[0].value - 3, + ) + ]) + self.manager.cpu_mining_service.resolve(tx1) + + # tx2 is a NanoContract transaction that spends tx1. + tx2 = gen_custom_base_tx(self.manager, tx_inputs=[(tx1, 0)]) + tx2 = self._gen_nc_tx(nc.hash, 'nop', [1], nc=tx2) + self.manager.cpu_mining_service.resolve(tx2) + + # tx1b is in conflict with tx1 + if conflict_with_nano: + tx1b = gen_custom_base_tx(self.manager, tx_inputs=[(tx0, 0)]) + self._gen_nc_tx(nc.hash, 'nop', [1], nc=tx1b) + else: + tx1b = gen_custom_base_tx(self.manager, tx_inputs=[(tx0, 0)]) + self.manager.cpu_mining_service.resolve(tx1b) + + # propagate both tx1 and tx2 + self.assertTrue(self.manager.on_new_tx(tx0)) + self.assertTrue(self.manager.on_new_tx(tx1)) + self.assertTrue(self.manager.on_new_tx(tx1b)) + self.assertTrue(self.manager.on_new_tx(tx2)) + + return cast(tuple[Transaction, ...], (tx0, tx1, tx1b, tx2)) + + def _run_nc_consensus_conflict_block_voided_1(self, *, conflict_with_nano: bool) -> None: + tx0, tx1, tx1b, tx2 = self._prepare_nc_consensus_conflict(conflict_with_nano=conflict_with_nano) + + # this block must be voided because it confirms both tx1 and tx1b. 
+ block = self.manager.generate_mining_block() + block.parents = [ + block.parents[0], + tx1.hash, + tx1b.hash, + ] + self.manager.cpu_mining_service.resolve(block) + self.assertTrue(self.manager.on_new_tx(block)) + self.assertTrue(block.get_metadata().voided_by) + + def test_nc_consensus_conflict_block_voided_1(self) -> None: + self._run_nc_consensus_conflict_block_voided_1(conflict_with_nano=False) + + def test_nc_consensus_nano_conflict_block_voided_1(self) -> None: + self._run_nc_consensus_conflict_block_voided_1(conflict_with_nano=True) + + def _run_nc_consensus_conflict_block_voided_2(self, *, conflict_with_nano: bool) -> None: + tx0, tx1, tx1b, tx2 = self._prepare_nc_consensus_conflict(conflict_with_nano=conflict_with_nano) + + # this block will be executed. + b0 = self.manager.generate_mining_block() + b0.parents = [ + b0.parents[0], + tx1.hash, + tx2.hash, + ] + self.manager.cpu_mining_service.resolve(b0) + self.assertTrue(self.manager.on_new_tx(b0)) + self.assertIsNone(b0.get_metadata().voided_by) + + # this block will be voided because it confirms tx1b. + b1 = self.manager.generate_mining_block() + b1.parents = [ + b1.parents[0], + tx1b.hash, + tx1b.parents[0], + ] + self.manager.cpu_mining_service.resolve(b1) + self.assertTrue(self.manager.on_new_tx(b1)) + self.assertIsNotNone(b1.get_metadata().voided_by) + + def test_nc_consensus_conflict_block_voided_2(self) -> None: + self._run_nc_consensus_conflict_block_voided_2(conflict_with_nano=False) + + def test_nc_consensus_nano_conflict_block_voided_2(self) -> None: + self._run_nc_consensus_conflict_block_voided_2(conflict_with_nano=True) + + def _run_nc_consensus_conflict_block_executed_1(self, *, conflict_with_nano: bool) -> None: + tx0, tx1, tx1b, tx2 = self._prepare_nc_consensus_conflict(conflict_with_nano=conflict_with_nano) + + # this block will be confirmed first. 
+ b0 = self.manager.generate_mining_block() + b0.parents = [ + b0.parents[0], + tx1.hash, + tx2.hash, + ] + self.manager.cpu_mining_service.resolve(b0) + + # this block will cause a reorg. + b1 = self.manager.generate_mining_block() + b1.weight += 1 + b1.parents = [ + b1.parents[0], + tx1.hash, + tx2.hash, + ] + self.manager.cpu_mining_service.resolve(b1) + + self.assertTrue(self.manager.on_new_tx(b0)) + self.assertIsNone(b0.get_metadata().voided_by) + self.assertTrue(self.manager.on_new_tx(b1)) + self.assertIsNotNone(b0.get_metadata().voided_by) + self.assertIsNone(b1.get_metadata().voided_by) + self.assertIsNone(tx1.get_metadata().voided_by) + self.assertIsNone(tx2.get_metadata().voided_by) + self.assertIsNotNone(tx1b.get_metadata().voided_by) + + def test_nc_consensus_conflict_block_executed_1(self) -> None: + self._run_nc_consensus_conflict_block_executed_1(conflict_with_nano=False) + + def test_nc_consensus_nano_conflict_block_executed_1(self) -> None: + self._run_nc_consensus_conflict_block_executed_1(conflict_with_nano=True) + + def _run_nc_consensus_conflict_block_executed_2(self, *, conflict_with_nano: bool) -> None: + tx0, tx1, tx1b, tx2 = self._prepare_nc_consensus_conflict(conflict_with_nano=conflict_with_nano) + + # this block is executed. + b0 = self.manager.generate_mining_block() + b0.parents = [ + b0.parents[0], + tx1b.hash, + tx1b.parents[0], + ] + self.manager.cpu_mining_service.resolve(b0) + + # this block will cause a reorg. 
+ b1 = self.manager.generate_mining_block() + b1.weight += 1 + b1.parents = [ + b1.parents[0], + tx1.hash, + tx2.hash, + ] + self.manager.cpu_mining_service.resolve(b1) + + self.assertTrue(self.manager.on_new_tx(b0)) + self.assertIsNone(b0.get_metadata().voided_by) + self.assertIsNotNone(tx1.get_metadata().voided_by) + self.assertIsNotNone(tx2.get_metadata().voided_by) + self.assertIsNone(tx1b.get_metadata().voided_by) + + self.assertTrue(self.manager.on_new_tx(b1)) + self.assertIsNotNone(b0.get_metadata().voided_by) + self.assertIsNone(b1.get_metadata().voided_by) + self.assertIsNone(tx1.get_metadata().voided_by) + self.assertIsNone(tx2.get_metadata().voided_by) + self.assertIsNotNone(tx1b.get_metadata().voided_by) + + def test_nc_consensus_conflict_block_executed_2(self) -> None: + self._run_nc_consensus_conflict_block_executed_2(conflict_with_nano=False) + + def test_nc_consensus_nano_conflict_block_executed_2(self) -> None: + self._run_nc_consensus_conflict_block_executed_2(conflict_with_nano=True) + + def test_nc_consensus_voided_tx_at_mempool(self) -> None: + dag_builder = TestDAGBuilder.from_manager(self.manager) + vertices = dag_builder.build_from_str(f''' + blockchain genesis b[1..40] + b30 < dummy + + tx1.nc_id = "{self.myblueprint_id.hex()}" + tx1.nc_method = initialize("00") + + # tx2 will fail because it does not have a deposit + tx2.nc_id = tx1 + tx2.nc_method = deposit() + tx2.out[0] <<< tx3 + + # tx3 will be voided because tx2 failed execution + tx3.nc_id = tx1 + tx3.nc_method = nop(1) + + b31 --> tx1 + b32 --> tx2 + b33 --> tx3 + ''') + + for node, vertex in vertices.list: + print() + print(node.name) + print() + self.manager.on_new_tx(vertex) + + b31 = vertices.by_name['b31'].vertex + b32 = vertices.by_name['b32'].vertex + b33 = vertices.by_name['b33'].vertex + + self.assertIsInstance(b31, Block) + self.assertIsInstance(b32, Block) + self.assertIsInstance(b33, Block) + self.assertIsNone(b31.get_metadata().voided_by) + 
self.assertIsNone(b32.get_metadata().voided_by) + self.assertIsNone(b33.get_metadata().voided_by) + + tx1 = vertices.by_name['tx1'].vertex + tx2 = vertices.by_name['tx2'].vertex + tx3 = vertices.by_name['tx3'].vertex + + meta1 = tx1.get_metadata() + meta2 = tx2.get_metadata() + meta3 = tx3.get_metadata() + + self.assertEqual(meta1.first_block, b31.hash) + self.assertEqual(meta2.first_block, b32.hash) + self.assertEqual(meta3.first_block, b33.hash) + + self.assertIsNone(meta1.voided_by) + self.assertEqual(meta2.voided_by, {tx2.hash, NC_EXECUTION_FAIL_ID}) + self.assertEqual(meta3.voided_by, {tx2.hash}) + + def test_reexecute_fail_on_reorg_different_blocks(self) -> None: + dag_builder = TestDAGBuilder.from_manager(self.manager) + artifacts = dag_builder.build_from_str(f''' + blockchain genesis b[1..33] + blockchain b31 a[32..34] + b30 < dummy + + nc1.nc_id = "{self.myblueprint_id.hex()}" + nc1.nc_method = initialize("00") + + # nc2 will fail because it does not have a deposit + nc2.nc_id = nc1 + nc2.nc_method = deposit() + + # nc3 will be voided because nc2 failed execution + nc3.nc_id = nc1 + nc3.nc_method = nop(1) + nc2.out[0] <<< nc3 + + nc1 <-- b31 + nc2 <-- b32 + nc3 <-- b33 + + # a34 will generate a reorg, reexecuting nc2 (which fails again). + # nc2 and nc3 are in different blocks. 
+ b33 < a32 + nc2 <-- a32 + nc3 <-- a33 + ''') + + b31, b32, b33 = artifacts.get_typed_vertices(['b31', 'b32', 'b33'], Block) + a32, a33, a34 = artifacts.get_typed_vertices(['a32', 'a33', 'a34'], Block) + nc1, nc2, nc3 = artifacts.get_typed_vertices(['nc1', 'nc2', 'nc3'], Transaction) + + assert nc1.is_nano_contract() + assert nc2.is_nano_contract() + assert nc3.is_nano_contract() + + found_b33 = False + for node, vertex in artifacts.list: + assert self.manager.on_new_tx(vertex) + + if node.name == 'b33': + found_b33 = True + assert b33.get_metadata().voided_by is None + assert nc1.get_metadata().voided_by is None + assert nc2.get_metadata().voided_by == {nc2.hash, NC_EXECUTION_FAIL_ID} + assert nc3.get_metadata().voided_by == {nc2.hash} + + assert nc1.get_metadata().first_block == b31.hash + assert nc2.get_metadata().first_block == b32.hash + assert nc3.get_metadata().first_block == b33.hash + + assert self.manager.get_nc_storage(b33, nc1.hash).get_obj(b'counter', INT_NC_TYPE) == 0 + + assert found_b33 + assert b33.get_metadata().voided_by == {b33.hash} + assert a34.get_metadata().voided_by is None + assert nc1.get_metadata().voided_by is None + assert nc2.get_metadata().voided_by == {nc2.hash, NC_EXECUTION_FAIL_ID} + assert nc3.get_metadata().voided_by == {nc2.hash} + + assert nc1.get_metadata().first_block == b31.hash + assert nc2.get_metadata().first_block == a32.hash + assert nc3.get_metadata().first_block == a33.hash + + assert self.manager.get_nc_storage(a33, nc1.hash).get_obj(b'counter', INT_NC_TYPE) == 0 + + def test_reexecute_fail_on_reorg_same_block(self) -> None: + dag_builder = TestDAGBuilder.from_manager(self.manager) + artifacts = dag_builder.build_from_str(f''' + blockchain genesis b[1..33] + blockchain b31 a[32..34] + b30 < dummy + + nc1.nc_id = "{self.myblueprint_id.hex()}" + nc1.nc_method = initialize("00") + + # nc2 will fail because it does not have a deposit + nc2.nc_id = nc1 + nc2.nc_method = deposit() + + # nc3 will be voided because nc2 
failed execution + nc3.nc_id = nc1 + nc3.nc_method = nop(1) + nc2.out[0] <<< nc3 + + nc1 <-- b31 + nc2 <-- b32 + nc3 <-- b33 + + # a34 will generate a reorg, reexecuting nc2 (which fails again). + # nc2 and nc3 are in the same block. + b33 < a32 + nc2 <-- nc3 <-- a33 + ''') + + b31, b32, b33 = artifacts.get_typed_vertices(['b31', 'b32', 'b33'], Block) + a32, a33, a34 = artifacts.get_typed_vertices(['a32', 'a33', 'a34'], Block) + nc1, nc2, nc3 = artifacts.get_typed_vertices(['nc1', 'nc2', 'nc3'], Transaction) + + assert nc1.is_nano_contract() + assert nc2.is_nano_contract() + assert nc3.is_nano_contract() + + found_b33 = False + for node, vertex in artifacts.list: + assert self.manager.on_new_tx(vertex) + + if node.name == 'b33': + found_b33 = True + assert b33.get_metadata().voided_by is None + assert nc1.get_metadata().voided_by is None + assert nc2.get_metadata().voided_by == {nc2.hash, NC_EXECUTION_FAIL_ID} + assert nc3.get_metadata().voided_by == {nc2.hash} + + assert nc1.get_metadata().first_block == b31.hash + assert nc2.get_metadata().first_block == b32.hash + assert nc3.get_metadata().first_block == b33.hash + + assert self.manager.get_nc_storage(b33, nc1.hash).get_obj(b'counter', INT_NC_TYPE) == 0 + + assert found_b33 + assert b33.get_metadata().voided_by == {b33.hash} + assert a34.get_metadata().voided_by is None + assert nc1.get_metadata().voided_by is None + assert nc2.get_metadata().voided_by == {nc2.hash, NC_EXECUTION_FAIL_ID} + assert nc3.get_metadata().voided_by == {nc2.hash} + + assert nc1.get_metadata().first_block == b31.hash + assert nc2.get_metadata().first_block == a33.hash + assert nc3.get_metadata().first_block == a33.hash + + assert self.manager.get_nc_storage(a33, nc1.hash).get_obj(b'counter', INT_NC_TYPE) == 0 + + def test_reexecute_success_on_reorg_different_blocks(self) -> None: + dag_builder = TestDAGBuilder.from_manager(self.manager) + artifacts = dag_builder.build_from_str(f''' + blockchain genesis b[1..33] + blockchain b31 a[32..34] 
+ b30 < dummy + + nc1.nc_id = "{self.myblueprint_id.hex()}" + nc1.nc_method = initialize("00") + nc1.nc_address = wallet1 + nc1.nc_seqnum = 1 + + # nc2 will fail because nc1.counter is 0 + nc2.nc_id = nc1 + nc2.nc_method = fail_on_zero() + nc2.nc_address = wallet1 + nc2.nc_seqnum = 3 # we skip 2 because nc4 will use it below + + # nc3 will be voided because nc2 failed execution + nc3.nc_id = nc1 + nc3.nc_method = nop(1) + nc3.nc_address = wallet1 + nc3.nc_seqnum = 4 + nc2.out[0] <<< nc3 + + nc1 <-- b31 + nc2 <-- b32 + nc3 <-- b33 + + # a34 will generate a reorg, reexecuting nc2. + # this time it succeeds because nc4 in the new chain increments nc1.counter to 1, before nc2. + # nc2 and nc3 are in different blocks. + + nc4.nc_id = nc1 + nc4.nc_method = nop(1) + nc4.nc_address = wallet1 + nc4.nc_seqnum = 2 + nc4 < nc2 + nc4 <-- a32 + + b33 < a32 + nc2 <-- a32 + nc3 <-- a33 + ''') + + b31, b32, b33 = artifacts.get_typed_vertices(['b31', 'b32', 'b33'], Block) + a32, a33, a34 = artifacts.get_typed_vertices(['a32', 'a33', 'a34'], Block) + nc1, nc2, nc3, nc4 = artifacts.get_typed_vertices(['nc1', 'nc2', 'nc3', 'nc4'], Transaction) + + assert nc1.is_nano_contract() + assert nc2.is_nano_contract() + assert nc3.is_nano_contract() + assert nc4.is_nano_contract() + + artifacts.propagate_with(self.manager, up_to='b33') + + assert b33.get_metadata().voided_by is None + assert nc1.get_metadata().voided_by is None + assert nc2.get_metadata().voided_by == {nc2.hash, NC_EXECUTION_FAIL_ID} + assert nc3.get_metadata().voided_by == {nc2.hash} + assert nc4.get_metadata().voided_by is None + + assert nc1.get_metadata().first_block == b31.hash + assert nc2.get_metadata().first_block == b32.hash + assert nc3.get_metadata().first_block == b33.hash + assert nc4.get_metadata().first_block is None + + assert self.manager.get_nc_storage(b33, nc1.hash).get_obj(b'counter', INT_NC_TYPE) == 0 + + artifacts.propagate_with(self.manager) + + assert b33.get_metadata().voided_by == {b33.hash} + assert 
a34.get_metadata().voided_by is None
+        assert nc1.get_metadata().voided_by is None
+        assert nc2.get_metadata().voided_by is None
+        assert nc3.get_metadata().voided_by is None
+        assert nc4.get_metadata().voided_by is None
+
+        assert nc1.get_metadata().first_block == b31.hash
+        assert nc2.get_metadata().first_block == a32.hash
+        assert nc3.get_metadata().first_block == a33.hash
+        assert nc4.get_metadata().first_block == a32.hash
+
+        # increments by nc4 and nc3
+        assert self.manager.get_nc_storage(a33, nc1.hash).get_obj(b'counter', INT_NC_TYPE) == 2
+
+    def test_reexecute_success_on_reorg_same_block(self) -> None:
+        dag_builder = TestDAGBuilder.from_manager(self.manager)
+        artifacts = dag_builder.build_from_str(f'''
+            blockchain genesis b[1..33]
+            blockchain b31 a[32..34]
+            b30 < dummy
+
+            nc1.nc_id = "{self.myblueprint_id.hex()}"
+            nc1.nc_method = initialize("00")
+
+            # nc2 will fail because nc1.counter is 0
+            nc2.nc_id = nc1
+            nc2.nc_method = fail_on_zero()
+
+            # nc3 will be voided because nc2 failed execution
+            nc3.nc_id = nc1
+            nc3.nc_method = nop(1)
+            nc2.out[0] <<< nc3
+
+            nc1 <-- b31
+            nc2 <-- b32
+            nc3 <-- b33
+
+            # a34 will generate a reorg, reexecuting nc2.
+            # this time it succeeds because nc4 in the new chain increments nc1.counter to 1, before nc2.
+            # nc2 and nc3 are in the same block.
+ + nc4.nc_id = nc1 + nc4.nc_method = nop(1) + nc4 < nc2 + nc4 <-- a32 + + b33 < a32 + nc2 <-- nc3 <-- a33 + ''') + + b31, b32, b33 = artifacts.get_typed_vertices(['b31', 'b32', 'b33'], Block) + a32, a33, a34 = artifacts.get_typed_vertices(['a32', 'a33', 'a34'], Block) + nc1, nc2, nc3, nc4 = artifacts.get_typed_vertices(['nc1', 'nc2', 'nc3', 'nc4'], Transaction) + + assert nc1.is_nano_contract() + assert nc2.is_nano_contract() + assert nc3.is_nano_contract() + assert nc4.is_nano_contract() + + found_b33 = False + for node, vertex in artifacts.list: + assert self.manager.on_new_tx(vertex) + + if node.name == 'b33': + found_b33 = True + assert b33.get_metadata().voided_by is None + assert nc1.get_metadata().voided_by is None + assert nc2.get_metadata().voided_by == {nc2.hash, NC_EXECUTION_FAIL_ID} + assert nc3.get_metadata().voided_by == {nc2.hash} + assert nc4.get_metadata().voided_by is None + + assert nc1.get_metadata().first_block == b31.hash + assert nc2.get_metadata().first_block == b32.hash + assert nc3.get_metadata().first_block == b33.hash + assert nc4.get_metadata().first_block is None + + assert self.manager.get_nc_storage(b33, nc1.hash).get_obj(b'counter', INT_NC_TYPE) == 0 + + assert found_b33 + assert b33.get_metadata().voided_by == {b33.hash} + assert a34.get_metadata().voided_by is None + assert nc1.get_metadata().voided_by is None + assert nc2.get_metadata().voided_by is None + assert nc3.get_metadata().voided_by is None + assert nc4.get_metadata().voided_by is None + + assert nc1.get_metadata().first_block == b31.hash + assert nc2.get_metadata().first_block == a33.hash + assert nc3.get_metadata().first_block == a33.hash + assert nc4.get_metadata().first_block == a32.hash + + # increments by nc4 and nc3 + assert self.manager.get_nc_storage(a33, nc1.hash).get_obj(b'counter', INT_NC_TYPE) == 2 + + def test_back_to_mempool(self) -> None: + dag_builder = TestDAGBuilder.from_manager(self.manager) + artifacts = dag_builder.build_from_str(f''' + blockchain 
genesis b[1..32] + blockchain b31 a[32..34] + b30 < dummy + + a34.weight = 40 + + nc1.nc_id = "{self.myblueprint_id.hex()}" + nc1.nc_method = initialize("00") + + nc1 <-- b32 + + # a34 will generate a reorg, moving nc1 back to mempool + b32 < a32 + ''') + + artifacts.propagate_with(self.manager) + + b32, a34 = artifacts.get_typed_vertices(['b32', 'a34'], Block) + nc1 = artifacts.get_typed_vertex('nc1', Transaction) + + assert b32.get_metadata().voided_by == {b32.hash} + assert a34.get_metadata().voided_by is None + + assert nc1.is_nano_contract() + nc1_meta = nc1.get_metadata() + + assert nc1_meta.first_block is None + assert nc1_meta.voided_by is None + assert nc1_meta.nc_execution is NCExecutionState.PENDING + assert nc1_meta.nc_calls is None + + def test_nc_consensus_voided_tx_propagation_to_blocks(self) -> None: + dag_builder = TestDAGBuilder.from_manager(self.manager) + artifacts = dag_builder.build_from_str(f''' + blockchain genesis b[1..50] + b30 < dummy + + tx1.nc_id = "{self.myblueprint_id.hex()}" + tx1.nc_method = initialize("00") + + tx2.nc_id = tx1 + tx2.nc_method = nop(1) + + # tx3 will fail because it does not have a deposit + tx3.nc_id = tx1 + tx3.nc_method = deposit() + + # tx4 will be voided because tx3 is voided + tx4.nc_id = tx1 + tx4.nc_method = nop(1) + tx2.out[0] <<< tx4 + tx3.out[0] <<< tx4 + + # As tx4 failed, tx5 is trying to spend the unspent output of tx2. 
+ tx5.nc_id = tx1 + tx5.nc_method = nop(1) + tx2.out[0] <<< tx5 + + b31 --> tx1 + b32 --> tx2 + b33 --> tx3 + b34 --> tx4 + + b50 < tx5 + ''') + + artifacts.propagate_with(self.manager) + + tx1, tx2, tx3, tx4, tx5 = artifacts.get_typed_vertices(['tx1', 'tx2', 'tx3', 'tx4', 'tx5'], Transaction) + + assert tx1.get_metadata().voided_by is None + assert tx2.get_metadata().voided_by is None + assert tx3.get_metadata().voided_by == {tx3.hash, NC_EXECUTION_FAIL_ID} + assert tx4.get_metadata().voided_by == {tx3.hash, tx4.hash} + assert tx5.get_metadata().voided_by is None + + assert tx1.get_metadata().nc_execution is NCExecutionState.SUCCESS + assert tx2.get_metadata().nc_execution is NCExecutionState.SUCCESS + assert tx3.get_metadata().nc_execution is NCExecutionState.FAILURE + assert tx4.get_metadata().nc_execution is NCExecutionState.SKIPPED + assert tx5.get_metadata().nc_execution is None + + b33, b34, b50 = artifacts.get_typed_vertices(['b33', 'b34', 'b50'], Block) + + self.assertIsNone(b33.get_metadata().voided_by) + self.assertIsNone(b34.get_metadata().voided_by) + self.assertIsNone(b50.get_metadata().voided_by) diff --git a/tests/nanocontracts/test_context.py b/tests/nanocontracts/test_context.py new file mode 100644 index 000000000..2251c40ff --- /dev/null +++ b/tests/nanocontracts/test_context.py @@ -0,0 +1,85 @@ +import copy + +from hathor.nanocontracts import Blueprint, public +from hathor.nanocontracts.catalog import NCBlueprintCatalog +from hathor.nanocontracts.context import Context +from hathor.nanocontracts.vertex_data import NanoHeaderData, VertexData +from hathor.transaction import Block, Transaction +from hathor.transaction.base_transaction import TxVersion +from tests.dag_builder.builder import TestDAGBuilder +from tests.nanocontracts.blueprints.unittest import BlueprintTestCase + +GLOBAL_VERTEX_DATA: VertexData | None = None + + +class RememberVertexDataBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @public + 
def remember_context(self, ctx: Context) -> None:
+        global GLOBAL_VERTEX_DATA
+        GLOBAL_VERTEX_DATA = copy.deepcopy(ctx.vertex)
+
+
+class ContextTestCase(BlueprintTestCase):
+    def setUp(self) -> None:
+        global GLOBAL_VERTEX_DATA
+
+        super().setUp()
+
+        self.blueprint_id = self.gen_random_contract_id()
+        self.manager.tx_storage.nc_catalog = NCBlueprintCatalog({
+            self.blueprint_id: RememberVertexDataBlueprint,
+        })
+        self.address = self.gen_random_address()
+
+        # clear vertex-data before and after
+        GLOBAL_VERTEX_DATA = None
+
+    def tearDown(self) -> None:
+        global GLOBAL_VERTEX_DATA
+
+        super().tearDown()
+        # clear vertex-data before and after
+        GLOBAL_VERTEX_DATA = None
+
+    def test_vertex_data(self) -> None:
+        global GLOBAL_VERTEX_DATA
+
+        dag_builder = TestDAGBuilder.from_manager(self.manager)
+        artifacts = dag_builder.build_from_str(f'''
+            blockchain genesis b[1..12]
+            b10 < dummy
+            nc1.nc_id = "{self.blueprint_id.hex()}"
+            nc1.nc_method = initialize()
+            nc1 <-- b11
+            nc2.nc_id = nc1
+            nc2.nc_method = remember_context()
+            nc1 <-- nc2 <-- b12
+        ''')
+        artifacts.propagate_with(self.manager)
+        b12, = artifacts.get_typed_vertices(['b12'], Block)
+        nc1, nc2 = artifacts.get_typed_vertices(['nc1', 'nc2'], Transaction)
+
+        # this is the vertex data that was observed by nc2 when remember_context was called
+        assert GLOBAL_VERTEX_DATA is not None
+        vertex_data = copy.deepcopy(GLOBAL_VERTEX_DATA)
+
+        # XXX: nonce varies, even for a weight of 1.0
+        # XXX: inputs/outputs/parents ignored since the dag builder will pick whatever to fill it in
+
+        self.assertEqual(vertex_data.version, TxVersion.REGULAR_TRANSACTION)
+        self.assertEqual(vertex_data.hash, nc2.hash)
+        self.assertEqual(vertex_data.signal_bits, 0)
+        self.assertEqual(vertex_data.weight, 1.0)
+        self.assertEqual(vertex_data.tokens, ())
+        self.assertEqual(vertex_data.block.hash, b12.hash)
+        self.assertEqual(vertex_data.block.timestamp, b12.timestamp)
+        self.assertEqual(vertex_data.block.height, b12.get_height())
+
nano_header_data, = vertex_data.headers + assert isinstance(nano_header_data, NanoHeaderData) + self.assertEqual(nano_header_data.nc_id, nc1.hash) + self.assertEqual(nano_header_data.nc_method, 'remember_context') + self.assertEqual(nano_header_data.nc_args_bytes, b'\x00') diff --git a/tests/nanocontracts/test_contract_create_contract.py b/tests/nanocontracts/test_contract_create_contract.py new file mode 100644 index 000000000..8cf15becb --- /dev/null +++ b/tests/nanocontracts/test_contract_create_contract.py @@ -0,0 +1,343 @@ +from typing import Optional + +from hathor.conf.settings import HATHOR_TOKEN_UID +from hathor.nanocontracts import Blueprint, Context, public +from hathor.nanocontracts.nc_types import NCType, make_nc_type_for_arg_type as make_nc_type +from hathor.nanocontracts.storage.contract_storage import Balance +from hathor.nanocontracts.types import ( + BlueprintId, + ContractId, + NCActionType, + NCDepositAction, + NCGrantAuthorityAction, + NCWithdrawalAction, + TokenUid, + VertexId, +) +from hathor.nanocontracts.utils import derive_child_contract_id +from hathor.transaction import Transaction, TxInput, TxOutput +from hathor.transaction.headers.nano_header import NanoHeaderAction +from hathor.transaction.token_creation_tx import TokenCreationTransaction +from tests.dag_builder.builder import TestDAGBuilder +from tests.nanocontracts.blueprints.unittest import BlueprintTestCase + +INT_NC_TYPE = make_nc_type(int) +CONTRACT_NC_TYPE: NCType[ContractId | None] = make_nc_type(ContractId | None) # type: ignore[arg-type] + + +class MyBlueprint1(Blueprint): + counter: int + contract: Optional[ContractId] + token_uid: Optional[TokenUid] + + @public(allow_deposit=True, allow_grant_authority=True) + def initialize(self, ctx: Context, blueprint_id: BlueprintId, initial: int, token_uid: Optional[TokenUid]) -> None: + self.token_uid = token_uid + if initial > 0: + token_uid = TokenUid(HATHOR_TOKEN_UID) + action = ctx.get_single_action(token_uid) + salt = b'x' + 
assert isinstance(action, NCDepositAction) + new_actions = [NCDepositAction(token_uid=token_uid, amount=action.amount - initial)] + self.contract, _ = self.syscall.create_contract( + blueprint_id, salt, new_actions, blueprint_id, initial - 1, self.token_uid + ) + else: + self.contract = None + self.counter = initial + + @public + def create_children(self, ctx: Context, blueprint_id: BlueprintId, salt: bytes) -> None: + new_actions = [] + if self.token_uid and self.syscall.can_mint(self.token_uid): + new_actions.append(NCGrantAuthorityAction(token_uid=self.token_uid, mint=True, melt=True)) + self.syscall.create_contract(blueprint_id, salt + b'1', new_actions, blueprint_id, 0, self.token_uid) + self.syscall.create_contract(blueprint_id, salt + b'2', new_actions, blueprint_id, 0, self.token_uid) + self.syscall.create_contract(blueprint_id, salt + b'3', new_actions, blueprint_id, 0, self.token_uid) + + @public + def nop(self, ctx: Context) -> None: + pass + + @public(allow_deposit=True) + def mint(self, ctx: Context, amount: int) -> None: + assert self.token_uid is not None + self.syscall.mint_tokens(self.token_uid, amount) + + @public(allow_withdrawal=True) + def withdraw(self, ctx: Context) -> None: + pass + + +class MyBlueprint2(Blueprint): + counter: int + token_uid: Optional[TokenUid] + + @public(allow_grant_authority=True) + def initialize(self, ctx: Context, blueprint_id: BlueprintId, initial: int, token_uid: Optional[TokenUid]) -> None: + self.counter = initial + self.token_uid = token_uid + + @public + def melt(self, ctx: Context, amount: int, contract_id: ContractId) -> None: + assert self.token_uid is not None + action = NCWithdrawalAction(token_uid=self.token_uid, amount=amount) + self.syscall.call_public_method(contract_id, 'withdraw', [action]) + self.syscall.melt_tokens(self.token_uid, amount) + + +class NCBlueprintTestCase(BlueprintTestCase): + def setUp(self): + super().setUp() + self.blueprint1_id = self._register_blueprint_class(MyBlueprint1) + 
self.blueprint2_id = self._register_blueprint_class(MyBlueprint2) + + def test_basic(self) -> None: + counter = 5 + nc1_id = ContractId(VertexId(b'1' * 32)) + + token_uid = TokenUid(HATHOR_TOKEN_UID) + deposit = 100 + actions = [NCDepositAction(token_uid=token_uid, amount=deposit)] + address = self.gen_random_address() + ctx = Context(actions, self.get_genesis_tx(), address, timestamp=0) + self.runner.create_contract(nc1_id, self.blueprint1_id, ctx, self.blueprint1_id, counter, None) + + nc_id = nc1_id + expected = counter + remainder = deposit + while True: + nc_storage = self.runner.get_storage(nc_id) + counter = nc_storage.get_obj(b'counter', INT_NC_TYPE) + assert counter == expected + new_nc_id = nc_storage.get_obj(b'contract', CONTRACT_NC_TYPE) + balance = nc_storage.get_balance(token_uid) + if new_nc_id is not None: + expected_nc_id = derive_child_contract_id(nc_id, b'x', self.blueprint1_id) + assert new_nc_id == expected_nc_id + assert balance == Balance(value=expected, can_mint=False, can_melt=False) + remainder -= balance.value + else: + assert balance.value == remainder + break + nc_id = new_nc_id + expected -= 1 + + actions = [] + ctx = Context(actions, self.get_genesis_tx(), address, timestamp=0) + salt = b'123' + self.runner.call_public_method(nc1_id, 'create_children', ctx, self.blueprint1_id, salt) + child1_id = derive_child_contract_id(nc1_id, salt + b'1', self.blueprint1_id) + child2_id = derive_child_contract_id(nc1_id, salt + b'2', self.blueprint1_id) + child3_id = derive_child_contract_id(nc1_id, salt + b'3', self.blueprint1_id) + child4_id = derive_child_contract_id(nc1_id, salt + b'4', self.blueprint1_id) + + assert self.runner.has_contract_been_initialized(child1_id) + assert self.runner.has_contract_been_initialized(child2_id) + assert self.runner.has_contract_been_initialized(child3_id) + assert not self.runner.has_contract_been_initialized(child4_id) + + salt = b'456' + self.runner.call_public_method(child1_id, 'create_children', ctx, 
self.blueprint1_id, salt) + child1_child1_id = derive_child_contract_id(child1_id, salt + b'1', self.blueprint1_id) + assert self.runner.has_contract_been_initialized(child1_child1_id) + + def test_dag_basic(self) -> None: + salt1 = b'x' + salt11 = salt1 + b'1' + salt2 = b'1' + salt21 = salt2 + b'1' + + dag_builder = TestDAGBuilder.from_manager(self.manager) + artifacts = dag_builder.build_from_str(f''' + blockchain genesis b[1..34] + blockchain b30 c[31..50] + b34 < c31 + b30 < dummy + + c31.weight = 6 + + nc1.nc_id = "{self.blueprint1_id.hex()}" + nc1.nc_method = initialize("{self.blueprint1_id.hex()}", 1, `TKA`) + nc1.nc_deposit = 10 HTR + nc1.out[0] = 200 TKA + + nc2.nc_id = nc1 + nc2.nc_method = create_children("{self.blueprint2_id.hex()}", "{salt1.hex()}") + + nc3.nc_id = child_contract(nc1, "{salt1.hex()}", "{self.blueprint1_id.hex()}") + nc3.nc_method = create_children("{self.blueprint1_id.hex()}", "{salt2.hex()}") + + nc4.nc_id = nc1 + nc4.nc_method = mint(456) + nc4.nc_deposit = 5 HTR + + nc5.nc_id = child_contract(nc3.nc_id, "{salt21.hex()}", "{self.blueprint1_id.hex()}") + nc5.nc_method = nop() + + nc6.nc_id = child_contract(nc2.nc_id, "{salt11.hex()}", "{self.blueprint2_id.hex()}") + nc6.nc_method = melt(123, `nc1`) + + nc1 <-- b31 + b31 < nc2 + nc2 <-- b32 + b32 < nc3 + nc3 <-- nc4 <-- b33 + b33 < nc5 + nc5 <-- nc6 <-- b34 + ''') + + nc1, nc2, nc3, nc4, nc5, nc6 = artifacts.get_typed_vertices( + ['nc1', 'nc2', 'nc3', 'nc4', 'nc5', 'nc6'], + Transaction, + ) + tka = artifacts.get_typed_vertex('TKA', TokenCreationTransaction) + + # TODO: The DAGBuilder currently doesn't support authority inputs/outputs, + # and neither authority actions, so we have to set them manually. Improve this. 
+ nc1.inputs.append(TxInput(tx_id=tka.hash, index=len(tka.outputs) - 1, data=b'')) # melt authority + nc1.inputs.append(TxInput(tx_id=tka.hash, index=len(tka.outputs) - 2, data=b'')) # mint authority + dag_builder._exporter.sign_all_inputs(nc1) + nc1_header = nc1.get_nano_header() + assert len(nc1_header.nc_actions) == 1 + grant_action = NanoHeaderAction( + type=NCActionType.GRANT_AUTHORITY, + token_index=1, + amount=TxOutput.ALL_AUTHORITIES, + ) + nc1_header.nc_actions.append(grant_action) + # XXX: Dirty hack, by purposefully not clearing the cache, we don't have to re-sign the nano header. + # nc1.clear_sighash_cache() + + artifacts.propagate_with(self.manager, up_to='b34') + + assert nc1.get_metadata().voided_by is None + assert nc2.get_metadata().voided_by is None + assert nc3.get_metadata().voided_by is None + assert nc4.get_metadata().voided_by is None + assert nc5.get_metadata().voided_by is None + assert nc6.get_metadata().voided_by is None + + nc1_contract_id = ContractId(VertexId(nc1.hash)) + + contracts = [] + # nc1 + contracts.append(nc1.hash) + contracts.append(derive_child_contract_id(nc1_contract_id, salt1, self.blueprint1_id)) + # nc2 + contracts.append(derive_child_contract_id(nc1_contract_id, salt1 + b'1', self.blueprint2_id)) + contracts.append(derive_child_contract_id(nc1_contract_id, salt1 + b'2', self.blueprint2_id)) + contracts.append(derive_child_contract_id(nc1_contract_id, salt1 + b'3', self.blueprint2_id)) + # nc3 + nc1_child1_contract_id = ContractId(VertexId(contracts[1])) + contracts.append(derive_child_contract_id(nc1_child1_contract_id, salt2 + b'1', self.blueprint1_id)) + contracts.append(derive_child_contract_id(nc1_child1_contract_id, salt2 + b'2', self.blueprint1_id)) + contracts.append(derive_child_contract_id(nc1_child1_contract_id, salt2 + b'3', self.blueprint1_id)) + # nc4, nc5, nc6 + # (empty) + + # Confirm that contract ids are different. 
+ assert len(set(contracts)) == len(contracts) + + runner = self.manager.get_best_block_nc_runner() + for idx, nc_id in enumerate(contracts): + assert runner.has_contract_been_initialized(nc_id), f'index={idx}' + + indexes = self.manager.tx_storage.indexes + + # blueprint_history: blueprint1 + result = set(indexes.blueprint_history.get_newest(self.blueprint1_id)) + expected = {nc1.hash, nc3.hash} + assert result == expected + + # blueprint_history: blueprint2 + result = set(indexes.blueprint_history.get_newest(self.blueprint2_id)) + expected = {nc2.hash} + assert result == expected + + # nc_creation + result = set(indexes.nc_creation.get_newest()) + expected = {nc1.hash, nc2.hash, nc3.hash} + assert result == expected + + # tokens + htr_total = indexes.tokens.get_token_info(HATHOR_TOKEN_UID).get_total() + tka_total = indexes.tokens.get_token_info(tka.hash).get_total() + assert self.manager.tx_storage.get_height_best_block() == 34 + # genesis + # +34 blocks + # -2 from the TKA mint in nc1.out[0] + # -5 from the mint in nc5.nc_method + # +1 from the melt in nc6.nc_method + assert htr_total == self._settings.GENESIS_TOKENS + 34 * self._settings.INITIAL_TOKENS_PER_BLOCK - 2 - 5 + 1 + # 200 from nc1.out[0] + # +456 from nc5.nc_method + # -123 from nc6.nc_method + assert tka_total == 200 + 456 - 123 + + # nc_history + expected_list = [ + {nc1.hash, nc2.hash, nc4.hash, nc6.hash}, + {nc1.hash, nc3.hash}, + {nc2.hash, nc6.hash}, + {nc2.hash}, + {nc2.hash}, + {nc3.hash, nc5.hash}, + {nc3.hash}, + {nc3.hash}, + ] + assert len(contracts) == len(expected_list) + match_list = [] + for nc_id, expected in zip(contracts, expected_list): + result = set(indexes.nc_history.get_newest(nc_id)) + match_list.append(result == expected) + assert all(match_list) + + # Reorg! 
+ artifacts.propagate_with(self.manager) + + runner = self.manager.get_best_block_nc_runner() + for nc_id in contracts: + assert not runner.has_contract_been_initialized(nc_id) + + # blueprint_history: blueprint1 + result = set(indexes.blueprint_history.get_newest(self.blueprint1_id)) + assert result == {nc1.hash} + + # blueprint_history: blueprint2 + result = set(indexes.blueprint_history.get_newest(self.blueprint2_id)) + assert result == set() + + # nc_creation + result = set(indexes.nc_creation.get_newest()) + assert result == {nc1.hash} + + # tokens + htr_total = indexes.tokens.get_token_info(HATHOR_TOKEN_UID).get_total() + tka_total = indexes.tokens.get_token_info(tka.hash).get_total() + assert self.manager.tx_storage.get_height_best_block() == 50 + # TODO: Is there a bug in the token index? It should be 50, not 54 blocks + # genesis + 50 blocks - 2 from the TKA mint in nc1.out[0] + assert htr_total == self._settings.GENESIS_TOKENS + 54 * self._settings.INITIAL_TOKENS_PER_BLOCK - 2 + # 200 from nc1.out[0] + assert tka_total == 200 + + # nc_history + expected_list = [ + {nc1.hash, nc2.hash, nc4.hash}, + {nc3.hash}, + {nc6.hash}, + set(), + set(), + {nc5.hash}, + set(), + set(), + ] + assert len(contracts) == len(expected_list) + match_list = [] + for nc_id, expected in zip(contracts, expected_list): + result = set(indexes.nc_history.get_newest(nc_id)) + match_list.append(result == expected) + assert all(match_list) + + # TODO Clean-up mempool after reorg? 
diff --git a/tests/nanocontracts/test_contract_upgrade.py b/tests/nanocontracts/test_contract_upgrade.py new file mode 100644 index 000000000..909947aef --- /dev/null +++ b/tests/nanocontracts/test_contract_upgrade.py @@ -0,0 +1,194 @@ +import pytest + +from hathor.nanocontracts import Blueprint, Context, fallback, public +from hathor.nanocontracts.exception import BlueprintDoesNotExist, NCFail, NCInvalidSyscall, NCMethodNotFound +from hathor.nanocontracts.types import BlueprintId, ContractId, NCAction, NCArgs +from tests.nanocontracts.blueprints.unittest import BlueprintTestCase + + +class ProxyBlueprint(Blueprint): + counter: int + contract: ContractId + + @public + def initialize(self, ctx: Context, contract: ContractId) -> None: + self.counter = 0 + self.contract = contract + + @public + def set_contract(self, ctx: Context, contract: ContractId) -> None: + self.contract = contract + + @public + def upgrade_no_cb(self, ctx: Context, blueprint_id: BlueprintId) -> None: + self.syscall.change_blueprint(blueprint_id) + + @public + def upgrade(self, ctx: Context, blueprint_id: BlueprintId, method_name: str) -> None: + contract_id = self.syscall.get_contract_id() + self.syscall.change_blueprint(blueprint_id) + self.syscall.call_public_method(self.contract, 'on_upgrade', [], contract_id, method_name) + + @public + def on_upgrade(self, ctx: Context) -> None: + raise NCFail('oops') + + @public + def inc(self, ctx: Context) -> None: + actions: list[NCAction] = [] + blueprint_id = self.syscall.get_blueprint_id(self.contract) + self.syscall.proxy_call_public_method(blueprint_id, 'inc', actions) + + @fallback + def fallback(self, ctx: Context, method_name: str, nc_args: NCArgs) -> None: + blueprint_id = self.syscall.get_blueprint_id(self.contract) + self.syscall.proxy_call_public_method_nc_args(blueprint_id, method_name, ctx.actions_list, nc_args) + + +class CodeBlueprint1(Blueprint): + counter: int + + @public + def initialize(self, ctx: Context) -> None: + self.counter = 0 
+ + @public + def inc(self, ctx: Context) -> None: + self.counter += 1 + + @public + def dec(self, ctx: Context) -> None: + self.counter -= 1 + + +class CodeBlueprint2(Blueprint): + counter: int + + @public + def initialize(self, ctx: Context) -> None: + self.counter = 0 + + @public + def inc(self, ctx: Context) -> None: + self.counter += 2 + + @public + def on_upgrade(self, ctx: Context, contract: ContractId, method_name: str) -> None: + self.syscall.call_public_method(contract, method_name, []) + + +class CodeBlueprint3(Blueprint): + counter: int + + @public + def initialize(self, ctx: Context) -> None: + self.counter = 0 + + @public + def inc(self, ctx: Context) -> None: + self.counter += 3 + + @public + def on_upgrade_inc(self, ctx: Context) -> None: + self.counter += 100 + + @public + def on_upgrade_fail(self, ctx: Context) -> None: + self.counter += 200 + raise NCFail('revert it all') + + +class NCDelegateCallTestCase(BlueprintTestCase): + def setUp(self): + super().setUp() + self.proxy_bp_id = self._register_blueprint_class(ProxyBlueprint) + self.code1_bp_id = self._register_blueprint_class(CodeBlueprint1) + self.code2_bp_id = self._register_blueprint_class(CodeBlueprint2) + self.code3_bp_id = self._register_blueprint_class(CodeBlueprint3) + + def test_basic(self) -> None: + code1_id = self.gen_random_contract_id() + code2_id = self.gen_random_contract_id() + proxy_id = self.gen_random_contract_id() + + tx = self.get_genesis_tx() + address = self.gen_random_address() + ctx = Context(actions=[], vertex=tx, address=address, timestamp=0) + + self.runner.create_contract(code1_id, self.code1_bp_id, ctx) + self.runner.create_contract(code2_id, self.code2_bp_id, ctx) + self.runner.create_contract(proxy_id, self.proxy_bp_id, ctx, code1_id) + + proxy_storage = self.runner.get_storage(proxy_id) + + code1_contract = self.get_readonly_contract(code1_id) + assert isinstance(code1_contract, CodeBlueprint1) + code2_contract = self.get_readonly_contract(code2_id) + assert 
isinstance(code2_contract, CodeBlueprint2) + proxy_contract = self.get_readonly_contract(proxy_id) + assert isinstance(proxy_contract, ProxyBlueprint) + + self.runner.call_public_method(proxy_id, 'set_contract', ctx, proxy_id) + with pytest.raises(NCInvalidSyscall, match='cannot call the same blueprint'): + self.runner.call_public_method(proxy_id, 'inc', ctx) + + self.runner.call_public_method(proxy_id, 'set_contract', ctx, code1_id) + self.runner.call_public_method(proxy_id, 'inc', ctx) + assert proxy_storage.get_blueprint_id() == self.proxy_bp_id + assert proxy_contract.contract == code1_id + assert code1_contract.counter == 0 + assert code2_contract.counter == 0 + assert proxy_contract.counter == 1 + + # it should invoke the fallback method which will call `dec()` from code1's blueprint. + self.runner.call_public_method(proxy_id, 'dec', ctx) + assert proxy_storage.get_blueprint_id() == self.proxy_bp_id + assert proxy_contract.contract == code1_id + assert code1_contract.counter == 0 + assert code2_contract.counter == 0 + assert proxy_contract.counter == 0 + + self.runner.call_public_method(proxy_id, 'set_contract', ctx, code1_id) + self.runner.call_public_method(proxy_id, 'inc', ctx) + assert proxy_storage.get_blueprint_id() == self.proxy_bp_id + assert proxy_contract.contract == code1_id + assert code1_contract.counter == 0 + assert code2_contract.counter == 0 + assert proxy_contract.counter == 1 + + with pytest.raises(NCFail): + self.runner.call_public_method(proxy_id, 'upgrade', ctx, self.code3_bp_id, 'on_upgrade_fail') + assert proxy_storage.get_blueprint_id() == self.proxy_bp_id + assert proxy_contract.counter == 1 + + self.runner.call_public_method(proxy_id, 'set_contract', ctx, code2_id) + self.runner.call_public_method(proxy_id, 'inc', ctx) + assert proxy_storage.get_blueprint_id() == self.proxy_bp_id + assert proxy_contract.contract == code2_id + assert code1_contract.counter == 0 + assert code2_contract.counter == 0 + assert proxy_contract.counter == 3 
+ + # it should invoke the fallback method which will fail calling `dec()` from code2's blueprint. + with pytest.raises(NCMethodNotFound, match='method `dec` not found and no fallback is provided'): + self.runner.call_public_method(proxy_id, 'dec', ctx) + assert proxy_storage.get_blueprint_id() == self.proxy_bp_id + assert proxy_contract.contract == code2_id + assert code1_contract.counter == 0 + assert code2_contract.counter == 0 + assert proxy_contract.counter == 3 + + unknown_bp_id = self.gen_random_blueprint_id() + with pytest.raises(BlueprintDoesNotExist): + self.runner.call_public_method(proxy_id, 'upgrade_no_cb', ctx, unknown_bp_id) + + self.runner.call_public_method(proxy_id, 'upgrade', ctx, self.code3_bp_id, 'on_upgrade_inc') + assert proxy_storage.get_blueprint_id() == self.code3_bp_id + assert proxy_contract.counter == 103 + + self.runner.call_public_method(proxy_id, 'inc', ctx) + # Even though 'contract' field does not exist in CodeBlueprint3, its value still exists in the storage. + assert proxy_contract.contract == code2_id + assert code1_contract.counter == 0 + assert code2_contract.counter == 0 + assert proxy_contract.counter == 106 diff --git a/tests/nanocontracts/test_custom_import.py b/tests/nanocontracts/test_custom_import.py new file mode 100644 index 000000000..6197dbf36 --- /dev/null +++ b/tests/nanocontracts/test_custom_import.py @@ -0,0 +1,71 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from io import StringIO +from textwrap import dedent +from unittest.mock import ANY, Mock, call + +from hathor.nanocontracts.custom_builtins import EXEC_BUILTINS +from tests.nanocontracts.blueprints.unittest import BlueprintTestCase + + +class TestCustomImport(BlueprintTestCase): + def test_custom_import(self) -> None: + """Guarantee our custom import function is being called, instead of the builtin one.""" + contract_id = self.gen_random_contract_id() + blueprint = ''' + from hathor.nanocontracts import Blueprint + from hathor.nanocontracts.context import Context + from hathor.nanocontracts.types import public + + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + from math import ceil, floor + from collections import OrderedDict + from hathor.nanocontracts.exception import NCFail + from hathor.nanocontracts.types import NCAction, NCActionType + + __blueprint__ = MyBlueprint + ''' + + # Wrap our custom builtin so we can spy its calls + wrapped_import_function = Mock(wraps=EXEC_BUILTINS['__import__']) + EXEC_BUILTINS['__import__'] = wrapped_import_function + + # Before being used, the function is uncalled + wrapped_import_function.assert_not_called() + + # During blueprint registration, the function is called for each import at the module level. + # This happens twice, once during verification and once during the actual registration. + blueprint_id = self.register_blueprint_contents(StringIO(dedent(blueprint))) + module_level_calls = [ + call('hathor.nanocontracts', ANY, ANY, ('Blueprint',), 0), + call('hathor.nanocontracts.context', ANY, ANY, ('Context',), 0), + call('hathor.nanocontracts.types', ANY, ANY, ('public',), 0), + ] + assert wrapped_import_function.call_count == 2 * len(module_level_calls) + wrapped_import_function.assert_has_calls(2 * module_level_calls) + wrapped_import_function.reset_mock() + + # During the call to initialize(), the function is called for each import on that method. 
+ self.runner.create_contract(contract_id, blueprint_id, self.create_context()) + method_level_imports = [ + call('math', ANY, ANY, ('ceil', 'floor'), 0), + call('collections', ANY, ANY, ('OrderedDict',), 0), + call('hathor.nanocontracts.exception', ANY, ANY, ('NCFail',), 0), + call('hathor.nanocontracts.types', ANY, ANY, ('NCAction', 'NCActionType'), 0), + ] + assert wrapped_import_function.call_count == len(method_level_imports) + wrapped_import_function.assert_has_calls(method_level_imports) diff --git a/tests/nanocontracts/test_execution_order.py b/tests/nanocontracts/test_execution_order.py new file mode 100644 index 000000000..4d8d83c3d --- /dev/null +++ b/tests/nanocontracts/test_execution_order.py @@ -0,0 +1,170 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from hathor.conf.settings import HATHOR_TOKEN_UID +from hathor.nanocontracts import Blueprint, Context, public +from hathor.nanocontracts.types import ( + ContractId, + NCAction, + NCDepositAction, + NCGrantAuthorityAction, + NCWithdrawalAction, + TokenUid, +) +from tests.nanocontracts.blueprints.unittest import BlueprintTestCase + + +class MyBlueprint(Blueprint): + token_uid: TokenUid + + @public(allow_deposit=True) + def initialize(self, ctx: Context, token_uid: TokenUid) -> None: + self.token_uid = token_uid + + def assert_balance(self, token_uid: TokenUid, *, before: int, current: int) -> None: + assert self.syscall.get_balance_before_current_call(token_uid) == before + assert self.syscall.get_current_balance(token_uid) == current + + def assert_token_balance(self, *, before: int, current: int) -> None: + self.assert_balance(self.token_uid, before=before, current=current) + + def assert_htr_balance(self, *, before: int, current: int) -> None: + self.assert_balance(TokenUid(HATHOR_TOKEN_UID), before=before, current=current) + + @public(allow_deposit=True) + def deposit(self, ctx: Context) -> None: + self.assert_htr_balance(before=10, current=10) + self.assert_token_balance(before=0, current=10) + + @public(allow_withdrawal=True) + def withdrawal(self, ctx: Context) -> None: + self.assert_htr_balance(before=10, current=10) + self.assert_token_balance(before=10, current=7) + + @public(allow_grant_authority=True) + def mint(self, ctx: Context) -> None: + self.assert_htr_balance(before=10, current=10) + self.assert_token_balance(before=0, current=0) + self.syscall.mint_tokens(self.token_uid, amount=300) + self.assert_htr_balance(before=10, current=7) + self.assert_token_balance(before=0, current=300) + + assert not self.syscall.can_mint_before_current_call(self.token_uid) + assert self.syscall.can_mint(self.token_uid) + self.syscall.revoke_authorities(self.token_uid, revoke_mint=True, revoke_melt=False) + assert not 
self.syscall.can_mint_before_current_call(self.token_uid) + assert not self.syscall.can_mint(self.token_uid) + + @public(allow_grant_authority=True) + def melt(self, ctx: Context) -> None: + self.assert_htr_balance(before=7, current=7) + self.assert_token_balance(before=300, current=300) + self.syscall.melt_tokens(self.token_uid, amount=200) + self.assert_htr_balance(before=7, current=9) + self.assert_token_balance(before=300, current=100) + + assert not self.syscall.can_melt_before_current_call(self.token_uid) + assert self.syscall.can_melt(self.token_uid) + self.syscall.revoke_authorities(self.token_uid, revoke_mint=False, revoke_melt=True) + assert not self.syscall.can_melt_before_current_call(self.token_uid) + assert not self.syscall.can_melt(self.token_uid) + + @public(allow_deposit=True) + def deposit_into_another(self, ctx: Context, contract_id: ContractId) -> None: + self.assert_token_balance(before=0, current=10) + action = NCDepositAction(token_uid=self.token_uid, amount=7) + self.syscall.call_public_method( + contract_id, 'accept_deposit_from_another', [action], self.syscall.get_contract_id() + ) + self.assert_token_balance(before=0, current=6) + + @public(allow_deposit=True) + def accept_deposit_from_another(self, ctx: Context, contract_id: ContractId) -> None: + self.assert_token_balance(before=0, current=7) + action = NCDepositAction(token_uid=self.token_uid, amount=3) + self.syscall.call_public_method(contract_id, 'accept_deposit_from_another_callback', [action]) + self.assert_token_balance(before=0, current=4) + + @public(allow_deposit=True) + def accept_deposit_from_another_callback(self, ctx: Context) -> None: + self.assert_token_balance(before=3, current=6) + + @public(allow_withdrawal=True) + def withdraw_from_another(self, ctx: Context, contract_id: ContractId) -> None: + self.assert_token_balance(before=6, current=5) + action = NCWithdrawalAction(token_uid=self.token_uid, amount=2) + self.syscall.call_public_method( + contract_id, 
'accept_withdrawal_from_another', [action], self.syscall.get_contract_id() + ) + self.assert_token_balance(before=6, current=6) + + @public(allow_withdrawal=True) + def accept_withdrawal_from_another(self, ctx: Context, contract_id: ContractId) -> None: + self.assert_token_balance(before=4, current=2) + action = NCWithdrawalAction(token_uid=self.token_uid, amount=1) + self.syscall.call_public_method(contract_id, 'accept_withdrawal_from_another_callback', [action]) + self.assert_token_balance(before=4, current=3) + + @public(allow_withdrawal=True) + def accept_withdrawal_from_another_callback(self, ctx: Context) -> None: + self.assert_token_balance(before=7, current=6) + + +class TestExecutionOrder(BlueprintTestCase): + def setUp(self) -> None: + super().setUp() + + self.blueprint_id = self._register_blueprint_class(MyBlueprint) + self.contract_id1 = self.gen_random_contract_id() + self.contract_id2 = self.gen_random_contract_id() + self.token_a = self.gen_random_token_uid() + self.tx = self.get_genesis_tx() + self.address = self.gen_random_address() + + action = NCDepositAction(token_uid=TokenUid(HATHOR_TOKEN_UID), amount=10) + self.runner.create_contract(self.contract_id1, self.blueprint_id, self._get_context(action), self.token_a) + self.runner.create_contract(self.contract_id2, self.blueprint_id, self._get_context(action), self.token_a) + + def _get_context(self, *actions: NCAction) -> Context: + return Context( + actions=list(actions), + vertex=self.tx, + address=self.address, + timestamp=self.now, + ) + + def test_deposit_and_withdrawal(self) -> None: + action: NCAction = NCDepositAction(token_uid=self.token_a, amount=10) + self.runner.call_public_method(self.contract_id1, 'deposit', self._get_context(action)) + + action = NCWithdrawalAction(token_uid=self.token_a, amount=3) + self.runner.call_public_method(self.contract_id1, 'withdrawal', self._get_context(action)) + + def test_mint_and_melt(self) -> None: + action: NCAction = 
NCGrantAuthorityAction(token_uid=self.token_a, mint=True, melt=False) + self.runner.call_public_method(self.contract_id1, 'mint', self._get_context(action)) + + action = NCGrantAuthorityAction(token_uid=self.token_a, mint=False, melt=True) + self.runner.call_public_method(self.contract_id1, 'melt', self._get_context(action)) + + def test_deposit_and_withdrawal_across_contracts(self) -> None: + action: NCAction = NCDepositAction(token_uid=self.token_a, amount=10) + self.runner.call_public_method( + self.contract_id1, 'deposit_into_another', self._get_context(action), self.contract_id2 + ) + + action = NCWithdrawalAction(token_uid=self.token_a, amount=1) + self.runner.call_public_method( + self.contract_id1, 'withdraw_from_another', self._get_context(action), self.contract_id2 + ) diff --git a/tests/nanocontracts/test_execution_verification.py b/tests/nanocontracts/test_execution_verification.py new file mode 100644 index 000000000..506f6d418 --- /dev/null +++ b/tests/nanocontracts/test_execution_verification.py @@ -0,0 +1,92 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import re + +import pytest + +from hathor.nanocontracts import Blueprint, Context, public +from hathor.nanocontracts.exception import ( + BlueprintDoesNotExist, + NCFail, + NCMethodNotFound, + NCUninitializedContractError, +) +from hathor.nanocontracts.method import ArgsOnly +from hathor.nanocontracts.types import NCRawArgs +from tests.nanocontracts.blueprints.unittest import BlueprintTestCase + + +class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context, a: int) -> None: + pass + + +class TestExecutionVerification(BlueprintTestCase): + def setUp(self) -> None: + super().setUp() + self.blueprint_id = self._register_blueprint_class(MyBlueprint) + self.contract_id = self.gen_random_contract_id() + + def test_blueprint_does_not_exist(self) -> None: + with pytest.raises(BlueprintDoesNotExist): + self.runner.create_contract(self.contract_id, self.gen_random_blueprint_id(), self.create_context(), 123) + + def test_contract_does_not_exist(self) -> None: + with pytest.raises(NCUninitializedContractError): + self.runner.call_public_method(self.gen_random_contract_id(), 'method', self.create_context()) + + def test_method_not_found(self) -> None: + self.runner.create_contract(self.contract_id, self.blueprint_id, self.create_context(), 123) + + with pytest.raises(NCMethodNotFound): + self.runner.call_public_method(self.contract_id, 'not_found', self.create_context()) + + def test_empty_args(self) -> None: + with pytest.raises(NCFail, match=re.escape("initialize() missing required argument: 'a'")): + self.runner.create_contract(self.contract_id, self.blueprint_id, self.create_context()) + + def test_too_many_args(self) -> None: + with pytest.raises(NCFail, match='too many arguments'): + self.runner.create_contract(self.contract_id, self.blueprint_id, self.create_context(), 123, 456) + + def test_wrong_arg_type_parsed(self) -> None: + with pytest.raises(NCFail) as e: + self.runner.create_contract(self.contract_id, self.blueprint_id, self.create_context(), 
'abc') + assert isinstance(e.value.__cause__, TypeError) + assert e.value.__cause__.args[0] == 'expected integer' + + def test_wrong_arg_type_raw(self) -> None: + args_parser = ArgsOnly.from_arg_types((str,)) + args_bytes = args_parser.serialize_args_bytes(('abc',)) + nc_args = NCRawArgs(args_bytes) + + with pytest.raises(NCFail) as e: + self.runner.create_contract_with_nc_args( + self.contract_id, self.blueprint_id, self.create_context(), nc_args + ) + assert isinstance(e.value.__cause__, ValueError) + assert e.value.__cause__.args[0] == 'trailing data' + + @pytest.mark.xfail(strict=True, reason='not implemented yet') + def test_wrong_arg_type_but_valid_serialization(self) -> None: + args_parser = ArgsOnly.from_arg_types((str,)) + args_bytes = args_parser.serialize_args_bytes(('',)) + nc_args = NCRawArgs(args_bytes) + + with pytest.raises(NCFail): + self.runner.create_contract_with_nc_args( + self.contract_id, self.blueprint_id, self.create_context(), nc_args + ) diff --git a/tests/nanocontracts/test_exposed_properties.py b/tests/nanocontracts/test_exposed_properties.py new file mode 100644 index 000000000..2240b0ea8 --- /dev/null +++ b/tests/nanocontracts/test_exposed_properties.py @@ -0,0 +1,346 @@ +from collections.abc import Iterator +from sys import version_info +from types import MethodType +from typing import Any + +from hathor.nanocontracts import Blueprint, Context, public +from hathor.nanocontracts.custom_builtins import EXEC_BUILTINS +from hathor.nanocontracts.on_chain_blueprint import ALLOWED_IMPORTS +from tests.nanocontracts.blueprints.unittest import BlueprintTestCase + +MAX_DEPTH = 20 +NEW_PROP_NAME = 'some_new_attribute' + +# XXX: if KNOWN_CASES is not empty then there is a bug +KNOWN_CASES = [ + 'MyBlueprint.check', + 'MyBlueprint.initialize', + 'MyBlueprint.log', + 'MyBlueprint.some_new_attribute', + 'MyBlueprint.syscall', + 'aiter.some_new_attribute', + 'all.some_new_attribute', + 'anext.some_new_attribute', + 'any.some_new_attribute', + 
'ascii.some_new_attribute', + 'breakpoint.some_new_attribute', + 'compile.some_new_attribute', + 'copyright.some_new_attribute', + 'credits.some_new_attribute', + 'ctx.actions_list', + 'delattr.some_new_attribute', + 'dir.some_new_attribute', + 'enumerate.some_new_attribute', + 'eval.some_new_attribute', + 'exec.some_new_attribute', + 'exit.eof', + 'exit.name', + 'exit.some_new_attribute', + 'getattr.some_new_attribute', + 'globals.some_new_attribute', + 'hasattr.some_new_attribute', + 'hathor.nanocontracts.Blueprint.log', + 'hathor.nanocontracts.Blueprint.some_new_attribute', + 'hathor.nanocontracts.Blueprint.syscall', + 'hathor.nanocontracts.blueprint.Blueprint.log', + 'hathor.nanocontracts.blueprint.Blueprint.some_new_attribute', + 'hathor.nanocontracts.blueprint.Blueprint.syscall', + 'hathor.nanocontracts.context.Context.actions', + 'hathor.nanocontracts.context.Context.actions_list', + 'hathor.nanocontracts.context.Context.address', + 'hathor.nanocontracts.context.Context.copy', + 'hathor.nanocontracts.context.Context.get_single_action', + 'hathor.nanocontracts.context.Context.some_new_attribute', + 'hathor.nanocontracts.context.Context.timestamp', + 'hathor.nanocontracts.context.Context.to_json', + 'hathor.nanocontracts.context.Context.vertex', + 'hathor.nanocontracts.exception.NCFail.add_note', + 'hathor.nanocontracts.exception.NCFail.args', + 'hathor.nanocontracts.exception.NCFail.some_new_attribute', + 'hathor.nanocontracts.exception.NCFail.with_traceback', + 'hathor.nanocontracts.types.Address.some_new_attribute', + 'hathor.nanocontracts.types.Amount.some_new_attribute', + 'hathor.nanocontracts.types.BlueprintId.some_new_attribute', + 'hathor.nanocontracts.types.ContractId.some_new_attribute', + 'hathor.nanocontracts.types.NCAcquireAuthorityAction.melt', + 'hathor.nanocontracts.types.NCAcquireAuthorityAction.mint', + 'hathor.nanocontracts.types.NCAcquireAuthorityAction.name', + 'hathor.nanocontracts.types.NCAcquireAuthorityAction.some_new_attribute', + 
'hathor.nanocontracts.types.NCAcquireAuthorityAction.to_json', + 'hathor.nanocontracts.types.NCAcquireAuthorityAction.token_uid', + 'hathor.nanocontracts.types.NCAcquireAuthorityAction.type', + 'hathor.nanocontracts.types.NCActionType.ACQUIRE_AUTHORITY._name_', + 'hathor.nanocontracts.types.NCActionType.ACQUIRE_AUTHORITY._sort_order_', + 'hathor.nanocontracts.types.NCActionType.ACQUIRE_AUTHORITY._value_', + 'hathor.nanocontracts.types.NCActionType.ACQUIRE_AUTHORITY.from_bytes', + 'hathor.nanocontracts.types.NCActionType.ACQUIRE_AUTHORITY.some_new_attribute', + 'hathor.nanocontracts.types.NCActionType.ACQUIRE_AUTHORITY.to_bytes', + 'hathor.nanocontracts.types.NCActionType.DEPOSIT._name_', + 'hathor.nanocontracts.types.NCActionType.DEPOSIT._sort_order_', + 'hathor.nanocontracts.types.NCActionType.DEPOSIT._value_', + 'hathor.nanocontracts.types.NCActionType.DEPOSIT.from_bytes', + 'hathor.nanocontracts.types.NCActionType.DEPOSIT.some_new_attribute', + 'hathor.nanocontracts.types.NCActionType.DEPOSIT.to_bytes', + 'hathor.nanocontracts.types.NCActionType.GRANT_AUTHORITY._name_', + 'hathor.nanocontracts.types.NCActionType.GRANT_AUTHORITY._sort_order_', + 'hathor.nanocontracts.types.NCActionType.GRANT_AUTHORITY._value_', + 'hathor.nanocontracts.types.NCActionType.GRANT_AUTHORITY.from_bytes', + 'hathor.nanocontracts.types.NCActionType.GRANT_AUTHORITY.some_new_attribute', + 'hathor.nanocontracts.types.NCActionType.GRANT_AUTHORITY.to_bytes', + 'hathor.nanocontracts.types.NCActionType.WITHDRAWAL._name_', + 'hathor.nanocontracts.types.NCActionType.WITHDRAWAL._sort_order_', + 'hathor.nanocontracts.types.NCActionType.WITHDRAWAL._value_', + 'hathor.nanocontracts.types.NCActionType.WITHDRAWAL.from_bytes', + 'hathor.nanocontracts.types.NCActionType.WITHDRAWAL.some_new_attribute', + 'hathor.nanocontracts.types.NCActionType.WITHDRAWAL.to_bytes', + 'hathor.nanocontracts.types.NCActionType._generate_next_value_', + 'hathor.nanocontracts.types.NCActionType._member_map_', + 
'hathor.nanocontracts.types.NCActionType._member_names_', + 'hathor.nanocontracts.types.NCActionType._member_type_', + 'hathor.nanocontracts.types.NCActionType._new_member_', + 'hathor.nanocontracts.types.NCActionType._unhashable_values_', + 'hathor.nanocontracts.types.NCActionType._use_args_', + 'hathor.nanocontracts.types.NCActionType._value2member_map_', + 'hathor.nanocontracts.types.NCActionType._value_repr_', + 'hathor.nanocontracts.types.NCActionType.from_bytes', + 'hathor.nanocontracts.types.NCActionType.some_new_attribute', + 'hathor.nanocontracts.types.NCActionType.to_bytes', + 'hathor.nanocontracts.types.NCDepositAction.amount', + 'hathor.nanocontracts.types.NCDepositAction.name', + 'hathor.nanocontracts.types.NCDepositAction.some_new_attribute', + 'hathor.nanocontracts.types.NCDepositAction.to_json', + 'hathor.nanocontracts.types.NCDepositAction.token_uid', + 'hathor.nanocontracts.types.NCDepositAction.type', + 'hathor.nanocontracts.types.NCGrantAuthorityAction.melt', + 'hathor.nanocontracts.types.NCGrantAuthorityAction.mint', + 'hathor.nanocontracts.types.NCGrantAuthorityAction.name', + 'hathor.nanocontracts.types.NCGrantAuthorityAction.some_new_attribute', + 'hathor.nanocontracts.types.NCGrantAuthorityAction.to_json', + 'hathor.nanocontracts.types.NCGrantAuthorityAction.token_uid', + 'hathor.nanocontracts.types.NCGrantAuthorityAction.type', + 'hathor.nanocontracts.types.NCParsedArgs.args', + 'hathor.nanocontracts.types.NCParsedArgs.kwargs', + 'hathor.nanocontracts.types.NCParsedArgs.some_new_attribute', + 'hathor.nanocontracts.types.NCRawArgs.args_bytes', + 'hathor.nanocontracts.types.NCRawArgs.some_new_attribute', + 'hathor.nanocontracts.types.NCRawArgs.try_parse_as', + 'hathor.nanocontracts.types.NCWithdrawalAction.amount', + 'hathor.nanocontracts.types.NCWithdrawalAction.name', + 'hathor.nanocontracts.types.NCWithdrawalAction.some_new_attribute', + 'hathor.nanocontracts.types.NCWithdrawalAction.to_json', + 
'hathor.nanocontracts.types.NCWithdrawalAction.token_uid', + 'hathor.nanocontracts.types.NCWithdrawalAction.type', + 'hathor.nanocontracts.types.SignedData._get_raw_signed_data', + 'hathor.nanocontracts.types.SignedData.checksig', + 'hathor.nanocontracts.types.SignedData.get_data_bytes', + 'hathor.nanocontracts.types.SignedData.some_new_attribute', + 'hathor.nanocontracts.types.Timestamp.some_new_attribute', + 'hathor.nanocontracts.types.TokenUid.some_new_attribute', + 'hathor.nanocontracts.types.TxOutputScript.some_new_attribute', + 'hathor.nanocontracts.types.VertexId.some_new_attribute', + 'hathor.nanocontracts.types.fallback.some_new_attribute', + 'hathor.nanocontracts.types.public.some_new_attribute', + 'hathor.nanocontracts.types.view.some_new_attribute', + 'help.some_new_attribute', + 'id.some_new_attribute', + 'input.some_new_attribute', + 'issubclass.some_new_attribute', + 'license.some_new_attribute', + 'locals.some_new_attribute', + 'memoryview.c_contiguous', + 'memoryview.cast', + 'memoryview.contiguous', + 'memoryview.f_contiguous', + 'memoryview.format', + 'memoryview.hex', + 'memoryview.itemsize', + 'memoryview.nbytes', + 'memoryview.ndim', + 'memoryview.obj', + 'memoryview.readonly', + 'memoryview.release', + 'memoryview.shape', + 'memoryview.some_new_attribute', + 'memoryview.strides', + 'memoryview.suboffsets', + 'memoryview.tobytes', + 'memoryview.tolist', + 'memoryview.toreadonly', + 'object.some_new_attribute', + 'open.some_new_attribute', + 'print.some_new_attribute', + 'property.deleter', + 'property.fdel', + 'property.fget', + 'property.fset', + 'property.getter', + 'property.setter', + 'property.some_new_attribute', + 'quit.eof', + 'quit.name', + 'quit.some_new_attribute', + 'range._getitem_int', + 'range._getitem_slice', + 'range._start', + 'range._step', + 'range._stop', + 'range.count', + 'range.index', + 'range.some_new_attribute', + 'range.start', + 'range.step', + 'range.stop', + 'repr.some_new_attribute', + 
'setattr.some_new_attribute', + 'super.some_new_attribute', + 'type.mro', + 'type.some_new_attribute', + 'vars.some_new_attribute', +] + +# XXX: these only appear in Python 3.11 +if version_info[1] == 11: + KNOWN_CASES.extend([ + 'hathor.nanocontracts.types.SignedData._is_protocol', + ]) + +# XXX: these only appear in Python 3.12 +if version_info[1] == 12: + KNOWN_CASES.extend([ + 'memoryview._from_flags', + ]) + +KNOWN_CASES.sort() + + +def is_writeable(obj: object, prop_name: str, value: Any) -> bool: + """ Returns True if `obj.prop_name = value` succeeds.""" + if has_value := hasattr(obj, prop_name): + orig_value = getattr(obj, prop_name) + try: + # try to overwrite the attribute + setattr(obj, prop_name, value) + # try to delete the attribute + delattr(obj, prop_name) + # restore original value if it had one + if has_value: + setattr(obj, prop_name, orig_value) + except AttributeError: + return False + except TypeError: + return False + else: + return True + + +def check_property_writeable(obj: object, prop_name: str) -> tuple[bool, object | None]: + """ Checks the property value and returns a tuple (writeable: bool, possible_object: object | None). + + The first value, `writeable: bool`, tells whether the property is writeable or not. + + The second value, `possible_object: object | None` is the value to be used to continue the recursive check, if it's + `None` there is no need to continue. Note: the value itself could be `None`, and we don't differentiate, we just + don't continue the search eitherway. 
+ """ + prop_value = getattr(obj, prop_name) + match prop_value: + case list(): + # XXX: lists are inherently mutable and shouldn't be exposed + prop_value.append(object()) + # XXX: is_writeable not called since True is always returned, but it's technically independant + return True, None + case dict(): + # XXX: dicts are inherently mutable and shouldn't be exposed + prop_value[None] = object() + # XXX: is_writeable not called since True is always returned, but it's technically independant + return True, None + case int(): + # XXX: no need to deep into int's properties + return is_writeable(obj, prop_name, 999), None + case str(): + # XXX: no need to deep into str's properties + return is_writeable(obj, prop_name, 'foobar'), None + case bytes(): + # XXX: no need to deep into bytes' properties + return is_writeable(obj, prop_name, b'foobar'), None + case tuple(): + # XXX: no need to deep into tuple's properties + return is_writeable(obj, prop_name, ()), None + case MethodType(): + # XXX: no need to deep into a method's properties + return is_writeable(obj, prop_name, lambda: 'foo'), None + case _ as value: + return is_writeable(obj, prop_name, object()), value + + +def should_skip_attr(prop_name: str) -> bool: + """Used to simulate AST restrictions and prevent loops.""" + return '__' in prop_name + + +def _search_writeable_properties(obj: object, *, path: tuple[str, ...], available_depth: int) -> Iterator[str]: + if available_depth <= 0: + assert 'MAX_DEPTH is not high enough to traverse everything' + all_names = set(dir(obj)) | set(getattr(obj, '__dict__', ())) | set(getattr(obj, '__slots__', ())) + prop_names = [prop_name for prop_name in all_names if not should_skip_attr(prop_name)] + available_depth -= 1 + for prop_name in prop_names: + next_path = path + (prop_name,) + prop_path = '.'.join(path + (prop_name,)) + prop_writeable, prop_value = check_property_writeable(obj, prop_name) + if prop_writeable: + yield prop_path + else: + if prop_value is not None: + 
yield from _search_writeable_properties(prop_value, path=next_path, available_depth=available_depth) + if is_writeable(obj, NEW_PROP_NAME, object()): + yield '.'.join(path + (NEW_PROP_NAME,)) + + +def search_writeable_properties(obj: object, obj_name: str, /) -> Iterator[str]: + """Searches for and returns a list of writeable properties, nested properties are joined with '.'""" + yield from _search_writeable_properties(obj, path=(obj_name,), available_depth=MAX_DEPTH) + + +class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @public + def check(self, ctx: Context) -> list[str]: + mutable_props: list[str] = [] + mutable_props.extend(search_writeable_properties(MyBlueprint, 'MyBlueprint')) + mutable_props.extend(search_writeable_properties(self, 'self')) + mutable_props.extend(search_writeable_properties(ctx, 'ctx')) + custom_import = EXEC_BUILTINS['__import__'] + for module_name, import_names in ALLOWED_IMPORTS.items(): + if module_name == 'typing': + # FIXME: typing module causes problems for some reason + continue + module = custom_import(module_name, fromlist=list(import_names)) + for import_name in import_names: + obj = getattr(module, import_name) + obj_name = f'{module_name}.{import_name}' + mutable_props.extend(search_writeable_properties(obj, obj_name)) + for builtin_name, builtin_obj in EXEC_BUILTINS.items(): + if should_skip_attr(builtin_name): + continue + mutable_props.extend(search_writeable_properties(builtin_obj, builtin_name)) + return mutable_props + + +class TestMutableAttributes(BlueprintTestCase): + def setUp(self) -> None: + super().setUp() + self.blueprint_id = self._register_blueprint_class(MyBlueprint) + self.contract_id = self.gen_random_contract_id() + self.runner.create_contract(self.contract_id, self.blueprint_id, self.create_context()) + + def test_search_mutable_properties(self) -> None: + mutable_props = sorted(self.runner.call_public_method(self.contract_id, 'check', self.create_context())) 
+ debug = False + if debug: + for prop in mutable_props: + print(f" '{prop}',") + self.assertEqual(mutable_props, KNOWN_CASES) diff --git a/tests/nanocontracts/test_fallback_method.py b/tests/nanocontracts/test_fallback_method.py new file mode 100644 index 000000000..30c7f2242 --- /dev/null +++ b/tests/nanocontracts/test_fallback_method.py @@ -0,0 +1,218 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import assert_never +from unittest.mock import ANY + +import pytest + +from hathor.conf.settings import HATHOR_TOKEN_UID +from hathor.nanocontracts import NC_EXECUTION_FAIL_ID, Blueprint, Context, NCFail, public +from hathor.nanocontracts.exception import NCError, NCInvalidMethodCall +from hathor.nanocontracts.method import ArgsOnly +from hathor.nanocontracts.nc_exec_logs import NCCallBeginEntry, NCCallEndEntry +from hathor.nanocontracts.runner.types import CallType +from hathor.nanocontracts.types import ContractId, NCArgs, NCDepositAction, NCParsedArgs, NCRawArgs, TokenUid, fallback +from hathor.transaction import Block, Transaction +from tests.dag_builder.builder import TestDAGBuilder +from tests.nanocontracts.blueprints.unittest import BlueprintTestCase +from tests.nanocontracts.utils import assert_nc_failure_reason + +# TODO: Test support for container args/kwargs such as list[int] after Jan's PR + + +class MyBlueprint(Blueprint): + @public(allow_deposit=True) + def initialize(self, ctx: Context) -> None: + pass + + 
@fallback(allow_deposit=True) + def fallback(self, ctx: Context, method_name: str, nc_args: NCArgs) -> str: + assert method_name == 'unknown' + match nc_args: + case NCRawArgs(): + # XXX: we might need to provide a better way to describe the expected signature to `try_parse_as`, + # because only looking a a tuple of types might not be enough, currently it is implemented + # without the knowledge of default arguments, what this implies is that considering a signature + # with types (str, int), it is possible for an empty tuple () to be a valid call, as long as the + # function has default values for its two arguments, the parser takes the optimist path and + # accepts parsing an empty tuple, so in this case args_bytes=b'\x00' parses to (), because it is + # possible that that is a valid call + result = nc_args.try_parse_as((str, int)) + if result is None: + raise NCFail(f'unsupported args: {nc_args}') + greeting, x = result + return self.greet_double(ctx, greeting, x) + case NCParsedArgs(args, kwargs): + return self.greet_double(ctx, *args, **kwargs) + case _: + assert_never(nc_args) + + def greet_double(self, ctx: Context, greeting: str, x: int) -> str: + return f'{greeting} {x + x}' + + @public(allow_deposit=True) + def call_another_fallback(self, ctx: Context, contract_id: ContractId) -> str: + return self.syscall.call_public_method(contract_id, 'fallback', []) + + @public + def call_own_fallback(self, ctx: Context) -> None: + # Even though users are not supposed to call the fallback like this, there's no harm and current + # code allows it, so I'm adding a test to cover it. We may prohibit it in the future. 
+ nc_args = NCParsedArgs(args=(), kwargs=dict(greeting='hello', x=123)) + self.fallback(ctx, 'unknown', nc_args) + + +class TestFallbackMethod(BlueprintTestCase): + def setUp(self) -> None: + super().setUp() + + self.blueprint_id = self._register_blueprint_class(MyBlueprint) + self.contract_id = self.gen_random_contract_id() + + self.ctx = Context( + actions=[NCDepositAction(token_uid=TokenUid(HATHOR_TOKEN_UID), amount=123)], + vertex=self.get_genesis_tx(), + address=self.gen_random_address(), + timestamp=self.now, + ) + self.runner.create_contract(self.contract_id, self.blueprint_id, self.ctx) + + def test_fallback_only_args_success(self) -> None: + result = self.runner.call_public_method(self.contract_id, 'unknown', self.ctx, 'hello', 123) + assert result == 'hello 246' + + last_call_info = self.runner.get_last_call_info() + assert last_call_info.nc_logger.__entries__ == [ + NCCallBeginEntry.construct( + timestamp=ANY, + nc_id=self.contract_id, + call_type=CallType.PUBLIC, + method_name='fallback', + str_args="('unknown', NCParsedArgs(args=('hello', 123), kwargs={}))", + actions=[dict(amount=123, token_uid='00', type='deposit')] + ), + NCCallEndEntry.construct(timestamp=ANY), + ] + + def test_fallback_only_kwargs_success(self) -> None: + result = self.runner.call_public_method(self.contract_id, 'unknown', self.ctx, greeting='hello', x=123) + assert result == 'hello 246' + + last_call_info = self.runner.get_last_call_info() + assert last_call_info.nc_logger.__entries__ == [ + NCCallBeginEntry.construct( + timestamp=ANY, + nc_id=self.contract_id, + call_type=CallType.PUBLIC, + method_name='fallback', + str_args="('unknown', NCParsedArgs(args=(), kwargs={'greeting': 'hello', 'x': 123}))", + actions=[dict(amount=123, token_uid='00', type='deposit')] + ), + NCCallEndEntry.construct(timestamp=ANY), + ] + + def test_fallback_args_kwargs_success(self) -> None: + result = self.runner.call_public_method(self.contract_id, 'unknown', self.ctx, 'hello', x=123) + assert result 
== 'hello 246' + + last_call_info = self.runner.get_last_call_info() + assert last_call_info.nc_logger.__entries__ == [ + NCCallBeginEntry.construct( + timestamp=ANY, + nc_id=self.contract_id, + call_type=CallType.PUBLIC, + method_name='fallback', + str_args="('unknown', NCParsedArgs(args=('hello',), kwargs={'x': 123}))", + actions=[dict(amount=123, token_uid='00', type='deposit')] + ), + NCCallEndEntry.construct(timestamp=ANY), + ] + + def test_cannot_call_fallback_directly(self) -> None: + with pytest.raises(NCError, match='method `fallback` is not a public method'): + self.runner.call_public_method(self.contract_id, 'fallback', self.ctx) + + def test_cannot_call_another_fallback_directly(self) -> None: + contract_id = self.gen_random_contract_id() + self.runner.create_contract(contract_id, self.blueprint_id, self.ctx) + with pytest.raises(NCInvalidMethodCall, match='method `fallback` is not a public method'): + self.runner.call_public_method(self.contract_id, 'call_another_fallback', self.ctx, contract_id) + + def test_fallback_args_bytes_success(self) -> None: + args_parser = ArgsOnly.from_arg_types((str, int)) + args_bytes = args_parser.serialize_args_bytes(('hello', 123)) + nc_args = NCRawArgs(args_bytes) + result = self.runner.call_public_method_with_nc_args(self.contract_id, 'unknown', self.ctx, nc_args) + assert result == 'hello 246' + + last_call_info = self.runner.get_last_call_info() + assert last_call_info.nc_logger.__entries__ == [ + NCCallBeginEntry.construct( + timestamp=ANY, + nc_id=self.contract_id, + call_type=CallType.PUBLIC, + method_name='fallback', + str_args=f"('unknown', NCRawArgs('{args_bytes.hex()}'))", + actions=[dict(amount=123, token_uid='00', type='deposit')] + ), + NCCallEndEntry.construct(timestamp=ANY), + ] + + def test_dag_fallback(self) -> None: + dag_builder = TestDAGBuilder.from_manager(self.manager) + valid_args_parser = ArgsOnly.from_arg_types((str, int)) + valid_args_bytes = valid_args_parser.serialize_args_bytes(('hello', 
123)) + invalid_args_parser = ArgsOnly.from_arg_types((int, int)) + invalid_args_bytes = invalid_args_parser.serialize_args_bytes((123, 456)) + + artifacts = dag_builder.build_from_str(f''' + blockchain genesis b[1..11] + b10 < dummy + + nc1.nc_id = "{self.blueprint_id.hex()}" + nc1.nc_method = initialize() + + nc2.nc_id = nc1 + nc2.nc_method = unknown + nc2.nc_args_bytes = "{valid_args_bytes.hex()}" + + nc3.nc_id = nc1 + nc3.nc_method = unknown + nc3.nc_args_bytes = "{invalid_args_bytes.hex()}" + + nc1 <-- nc2 <-- nc3 <-- b11 + ''') + + artifacts.propagate_with(self.manager) + b11 = artifacts.get_typed_vertex('b11', Block) + nc1, nc2, nc3 = artifacts.get_typed_vertices(['nc1', 'nc2', 'nc3'], Transaction) + + assert b11.get_metadata().voided_by is None + assert nc1.get_metadata().voided_by is None + + # nc2 successfully executes because the nc_args_bytes is correct + assert nc2.get_metadata().voided_by is None + + # nc3 fails because the fallback method is not expecting these args_bytes + assert nc3.get_metadata().voided_by == {nc3.hash, NC_EXECUTION_FAIL_ID} + assert_nc_failure_reason( + manager=self.manager, + tx_id=nc3.hash, + block_id=b11.hash, + reason=f'NCFail: unsupported args: {invalid_args_bytes.hex()}', + ) + + def test_call_own_fallback(self) -> None: + self.runner.call_public_method(self.contract_id, 'call_own_fallback', self.create_context()) diff --git a/tests/nanocontracts/test_follow_up_call.py b/tests/nanocontracts/test_follow_up_call.py new file mode 100644 index 000000000..5fa7dd379 --- /dev/null +++ b/tests/nanocontracts/test_follow_up_call.py @@ -0,0 +1,112 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest + +from hathor.nanocontracts import Blueprint, Context, NCFail, public, view +from hathor.nanocontracts.types import ContractId +from tests.nanocontracts.blueprints.unittest import BlueprintTestCase + + +class MyBlueprint1(Blueprint): + other_id: ContractId + + @public + def initialize(self, ctx: Context, other_id: ContractId) -> None: + self.other_id = other_id + + @public + def public_nop(self, ctx: Context) -> None: + pass + + @view + def view_call_other_view(self, method_name: str) -> None: + self.syscall.call_view_method(self.other_id, method_name) + + @public + def public_call_other_view(self, ctx: Context, method_name: str) -> None: + self.syscall.call_view_method(self.other_id, method_name) + + @public + def public_call_other_public(self, ctx: Context, method_name: str) -> None: + self.syscall.call_public_method(self.other_id, method_name, []) + + +class MyBlueprint2(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @view + def view_nop(self) -> None: + pass + + @view + def view_fail(self) -> None: + raise NCFail('fail called') + + @public + def public_nop(self, ctx: Context) -> None: + pass + + @public + def public_fail(self, ctx: Context) -> None: + raise NCFail('fail called') + + +class TestFollowUpCall(BlueprintTestCase): + def setUp(self) -> None: + super().setUp() + + self.blueprint_id1 = self._register_blueprint_class(MyBlueprint1) + self.blueprint_id2 = self._register_blueprint_class(MyBlueprint2) + + self.contract_id = self.gen_random_contract_id() + self.other_id = self.gen_random_contract_id() 
+ + self.runner.create_contract(self.other_id, self.blueprint_id2, self.create_context()) + self.runner.create_contract(self.contract_id, self.blueprint_id1, self.create_context(), self.other_id) + + def test_view_call_other_view_success(self) -> None: + self.runner.call_view_method(self.contract_id, 'view_call_other_view', 'view_nop') + self.runner.call_public_method(self.contract_id, 'public_nop', self.create_context()) + + def test_public_call_other_view_success(self) -> None: + self.runner.call_public_method(self.contract_id, 'public_call_other_view', self.create_context(), 'view_nop') + self.runner.call_public_method(self.contract_id, 'public_nop', self.create_context()) + + def test_public_call_other_public_success(self) -> None: + self.runner.call_public_method( + self.contract_id, 'public_call_other_public', self.create_context(), 'public_nop' + ) + self.runner.call_public_method(self.contract_id, 'public_nop', self.create_context()) + + def test_view_call_other_view_fail(self) -> None: + with pytest.raises(NCFail, match='fail called'): + self.runner.call_view_method(self.contract_id, 'view_call_other_view', 'view_fail') + self.runner.call_public_method(self.contract_id, 'public_nop', self.create_context()) + + def test_public_call_other_view_fail(self) -> None: + with pytest.raises(NCFail, match='fail called'): + self.runner.call_public_method( + self.contract_id, 'public_call_other_view', self.create_context(), 'view_fail' + ) + self.runner.call_public_method(self.contract_id, 'public_nop', self.create_context()) + + def test_public_call_other_public_fail(self) -> None: + with pytest.raises(NCFail, match='fail called'): + self.runner.call_public_method( + self.contract_id, 'public_call_other_public', self.create_context(), 'public_fail' + ) + self.runner.call_public_method(self.contract_id, 'public_nop', self.create_context()) diff --git a/tests/nanocontracts/test_get_contract.py b/tests/nanocontracts/test_get_contract.py new file mode 100644 index 
000000000..e0796be94 --- /dev/null +++ b/tests/nanocontracts/test_get_contract.py @@ -0,0 +1,149 @@ +import os + +from hathor.conf import HathorSettings +from hathor.crypto.util import decode_address +from hathor.nanocontracts import Blueprint, Context, public, view +from hathor.nanocontracts.exception import NCFail +from hathor.nanocontracts.types import Address, Amount, ContractId, TokenUid, VertexId +from hathor.transaction import BaseTransaction +from hathor.util import not_none +from hathor.wallet import KeyPair +from tests.nanocontracts.blueprints.unittest import BlueprintTestCase + +settings = HathorSettings() + + +class MyBlueprint(Blueprint): + counter: int + totals: dict[Address, Amount] + + @public + def initialize(self, ctx: Context) -> None: + # self.totals = {} # XXX: "dict" initializes implicitly + self.counter = 0 + + @view + def get_total(self, address: Address) -> int: + return self.totals.get(address, 0) + + @public(allow_deposit=True) + def address_add(self, ctx: Context, address: Address, amount: Amount) -> None: + self.counter += 1 + # XXX: mypy complains when doing += + self.totals[address] = Amount(self.totals[address] + amount) + + @public(allow_withdrawal=True) + def address_subtract(self, ctx: Context, address: Address, amount: Amount) -> None: + self.counter += 1 + if self.totals[address] < amount: + raise NCFail('cannot subtract') + # XXX: mypy complains when doing -= + self.totals[address] = Amount(self.totals[address] - amount) + + +class NCGetContractTestCase(BlueprintTestCase): + def setUp(self): + super().setUp() + self.token_uid = TokenUid(settings.HATHOR_TOKEN_UID) + self.nc_id = ContractId(VertexId(b'1' * 32)) + self.blueprint_id = self._register_blueprint_class(MyBlueprint) + self.initialize_contract() + self.nc_storage = self.runner.get_storage(self.nc_id) + + def get_any_tx(self) -> BaseTransaction: + genesis = self.manager.tx_storage.get_all_genesis() + tx = [t for t in genesis if t.is_transaction][0] + return tx + + def 
get_any_address(self) -> tuple[Address, KeyPair]: + password = os.urandom(12) + key = KeyPair.create(password) + address_b58 = key.address + address_bytes = Address(decode_address(not_none(address_b58))) + return address_bytes, key + + def get_current_timestamp(self) -> int: + return int(self.clock.seconds()) + + def dummy_context(self) -> Context: + return Context([], self.get_any_tx(), Address(b''), timestamp=self.get_current_timestamp()) + + def initialize_contract(self) -> None: + self.runner.create_contract(self.nc_id, self.blueprint_id, self.dummy_context()) + + def test_get_readonly_contract(self) -> None: + contract = self.get_readonly_contract(self.nc_id) + assert isinstance(contract, MyBlueprint) + + # counter was initialized with 0 + self.assertEqual(contract.counter, 0) + + # view method works + address, _ = self.get_any_address() + self.assertEqual(contract.get_total(address), 0) + + # no write, direct or indirect is allowed: + + with self.assertRaises(RuntimeError): + contract.counter = 5 + + with self.assertRaises(RuntimeError): + contract.counter += 1 + + ctx = self.dummy_context() + + with self.assertRaises(RuntimeError): + contract.totals[address] = Amount(5) + + with self.assertRaises(RuntimeError): + contract.address_add(ctx, address, 10) + + def test_get_readwrite_contract(self) -> None: + contract = self.get_readwrite_contract(self.nc_id) + assert isinstance(contract, MyBlueprint) + + # counter was initialized with 0 + self.assertEqual(contract.counter, 0) + + # incrementing works + contract.counter += 2 + self.assertEqual(contract.counter, 2) + + # one more tim to check it added to 2 (and not to 0) + contract.counter += 3 + self.assertEqual(contract.counter, 5) + + # wrong type fails immediately + with self.assertRaises(TypeError): + contract.counter = "7" # type: ignore[assignment] + + # no effect on actual stored value + self.assertEqual(contract.counter, 5) + + ctx = self.dummy_context() + address, _ = self.get_any_address() + + # direct 
view call works: + contract.totals[address] = Amount(5) + self.assertEqual(contract.get_total(address), 5) + + # dict values also fail immediately if either key or value type is wrong: + with self.assertRaises(TypeError): + contract.totals[address] = "7" # type: ignore[assignment] + with self.assertRaises(TypeError): + contract.totals["myaddress"] = Amount(5) # type: ignore[index] + + # also no effect on stored value + self.assertEqual(contract.get_total(address), 5) + + # view call method also works: + total_address = self.runner.call_view_method(self.nc_id, 'get_total', address) + self.assertEqual(total_address, 5) + + # direct public call works: + contract.address_add(ctx, address, 7) + self.assertEqual(contract.get_total(address), 12) + + # public call method also works: + self.runner.call_public_method(self.nc_id, 'address_subtract', ctx, address, 2) + self.assertEqual(contract.totals[address], 10) diff --git a/tests/nanocontracts/test_indexes.py b/tests/nanocontracts/test_indexes.py new file mode 100644 index 000000000..0e54c0eb0 --- /dev/null +++ b/tests/nanocontracts/test_indexes.py @@ -0,0 +1,229 @@ +from typing import Any, Optional + +from hathor.conf import HathorSettings +from hathor.dag_builder.artifacts import DAGArtifacts +from hathor.manager import HathorManager +from hathor.nanocontracts import NC_EXECUTION_FAIL_ID, Blueprint, Context, NCFail, public +from hathor.nanocontracts.catalog import NCBlueprintCatalog +from hathor.nanocontracts.method import Method +from hathor.nanocontracts.types import NCActionType +from hathor.nanocontracts.utils import sign_pycoin +from hathor.simulator.trigger import StopAfterMinimumBalance, StopAfterNMinedBlocks +from hathor.transaction import BaseTransaction, Transaction, TxOutput +from hathor.transaction.headers.nano_header import NanoHeaderAction +from hathor.types import AddressB58 +from tests.dag_builder.builder import TestDAGBuilder +from tests.nanocontracts.blueprints.unittest import BlueprintTestCase +from 
tests.simulation.base import SimulatorTestCase + +settings = HathorSettings() + + +class MyBlueprint(Blueprint): + counter: int + + @public(allow_deposit=True) + def initialize(self, ctx: Context) -> None: + self.counter = 0 + + @public + def nop(self, ctx: Context) -> None: + self.counter += 1 + + @public + def fail(self, ctx: Context) -> None: + raise NCFail('fail') + + +class BaseIndexesTestCase(BlueprintTestCase, SimulatorTestCase): + __test__ = False + + def setUp(self): + super().setUp() + + self.myblueprint_id = b'x' * 32 + self.catalog = NCBlueprintCatalog({ + self.myblueprint_id: MyBlueprint + }) + self.nc_seqnum = 0 + + self.manager.allow_mining_without_peers() + self.manager.tx_storage.nc_catalog = self.catalog + + self.wallet = self.manager.wallet + + self.miner = self.simulator.create_miner(self.manager, hashpower=100e6) + self.miner.start() + + self.token_uid = b'\0' + trigger = StopAfterMinimumBalance(self.wallet, self.token_uid, 1) + self.assertTrue(self.simulator.run(7200, trigger=trigger)) + self.assertTrue(self.simulator.run(120)) + + def fill_nc_tx( + self, + nc: Transaction, + nc_id: bytes, + nc_method: str, + nc_args: list[Any], + *, + address: Optional[AddressB58] = None, + nc_actions: list[NanoHeaderAction] | None = None, + ) -> None: + method_parser = Method.from_callable(getattr(MyBlueprint, nc_method)) + nc_args_bytes = method_parser.serialize_args_bytes(nc_args) + + if address is None: + address = self.wallet.get_unused_address() + privkey = self.wallet.get_private_key(address) + + from hathor.transaction.headers import NanoHeader + nano_header = NanoHeader( + tx=nc, + nc_seqnum=self.nc_seqnum, + nc_id=nc_id, + nc_method=nc_method, + nc_args_bytes=nc_args_bytes, + nc_address=b'', + nc_script=b'', + nc_actions=nc_actions or [], + ) + nc.headers.append(nano_header) + self.nc_seqnum += 1 + + sign_pycoin(nano_header, privkey) + + def finish_and_broadcast_tx(self, tx: BaseTransaction, confirmations: int = 1) -> None: + tx.timestamp = 
int(self.manager.reactor.seconds()) + tx.parents = self.manager.get_new_tx_parents() + tx.weight = self.manager.daa.minimum_tx_weight(tx) + + # broadcast + self.manager.cpu_mining_service.resolve(tx) + self.manager.on_new_tx(tx) + trigger = StopAfterNMinedBlocks(self.miner, quantity=confirmations) + self.assertTrue(self.simulator.run(7200, trigger=trigger)) + + def test_tokens_index(self): + token_info0 = self.manager.tx_storage.indexes.tokens.get_token_info(self.token_uid) + new_blocks = 0 + + # Deposits 1 HTR + _inputs, deposit_amount = self.wallet.get_inputs_from_amount(1, self.manager.tx_storage) + tx = self.wallet.prepare_transaction(Transaction, _inputs, []) + self.fill_nc_tx(tx, self.myblueprint_id, 'initialize', [], nc_actions=[ + NanoHeaderAction( + type=NCActionType.DEPOSIT, + token_index=0, + amount=deposit_amount, + ) + ]) + self.finish_and_broadcast_tx(tx, confirmations=2) + new_blocks += 2 + + self.assertIsNotNone(tx.get_metadata().first_block) + self.assertIsNone(tx.get_metadata().voided_by) + nc_id = tx.hash + + token_info1 = self.manager.tx_storage.indexes.tokens.get_token_info(self._settings.HATHOR_TOKEN_UID) + self.assertEqual(token_info0.get_total() + 64_00 * new_blocks, token_info1.get_total()) + + # Withdrawals 1 HTR + tx2 = Transaction(outputs=[TxOutput(1, b'', 0)]) + self.fill_nc_tx(tx2, nc_id, 'nop', [], nc_actions=[ + NanoHeaderAction( + type=NCActionType.WITHDRAWAL, + token_index=0, + amount=1, + ) + ]) + self.finish_and_broadcast_tx(tx2, confirmations=2) + new_blocks += 2 + + token_info1 = self.manager.tx_storage.indexes.tokens.get_token_info(self._settings.HATHOR_TOKEN_UID) + self.assertEqual(token_info0.get_total() + 64_00 * new_blocks, token_info1.get_total()) + + def test_remove_voided_nano_tx_from_parents_1(self): + vertices = self._run_test_remove_voided_nano_tx_from_parents('tx3 < b35') + v = [node.name for node, _ in vertices.list] + self.assertTrue(v.index('tx3') < v.index('b35')) + + def 
test_remove_voided_nano_tx_from_parents_2(self): + vertices = self._run_test_remove_voided_nano_tx_from_parents('b35 < tx3') + v = [node.name for node, _ in vertices.list] + self.assertTrue(v.index('b35') < v.index('tx3')) + + def _run_test_remove_voided_nano_tx_from_parents(self, order: str) -> DAGArtifacts: + builder = TestDAGBuilder.from_manager(self.manager) + vertices = builder.build_from_str(f''' + blockchain genesis b[0..40] + b0.weight = 50 + + b30 < dummy + + tx1.nc_id = "{self.myblueprint_id.hex()}" + tx1.nc_method = initialize() + tx1.nc_deposit = 10 HTR + tx1.out[0] <<< tx2 + + tx2.nc_id = tx1 + tx2.nc_method = fail() + tx2.out[0] <<< tx3 + + tx3.nc_id = tx1 + tx3.nc_method = nop() + + tx1 <-- tx2 <-- b35 + + {order} + ''') + + for node, vertex in vertices.list: + print() + print(node.name) + print() + self.manager.on_new_tx(vertex) + + tx1 = vertices.by_name['tx1'].vertex + tx2 = vertices.by_name['tx2'].vertex + tx3 = vertices.by_name['tx3'].vertex + b35 = vertices.by_name['b35'].vertex + + meta1 = tx1.get_metadata() + meta2 = tx2.get_metadata() + meta3 = tx3.get_metadata() + + # confirm that b35 belongs to the best blockchain + self.assertIsNone(b35.get_metadata().voided_by) + + # only tx1 and tx2 should be confirmed + self.assertEqual(meta1.first_block, b35.hash) + self.assertEqual(meta2.first_block, b35.hash) + self.assertIsNone(meta3.first_block) + + # tx1 succeeded; tx2 failed so tx3 must be voided + self.assertIsNone(meta1.voided_by) + self.assertEqual(meta2.voided_by, {tx2.hash, NC_EXECUTION_FAIL_ID}) + self.assertEqual(meta3.voided_by, {tx2.hash}) + + # check we are not using tx3 as parents for transactions + parent_txs = self.manager.generate_parent_txs(timestamp=None) + self.assertNotIn(tx3.hash, parent_txs.can_include) + self.assertNotIn(tx3.hash, parent_txs.must_include) + + # check we are not using tx3 as parents for blocks + block_templates = self.manager.make_block_templates() + for template in block_templates: + 
self.assertNotIn(tx3.hash, template.parents) + self.assertNotIn(tx3.hash, template.parents_any) + + return vertices + + +class RocksDBIndexesTestCase(BaseIndexesTestCase): + __test__ = True + + def build_manager(self) -> 'HathorManager': + builder = self.simulator.get_default_builder() + builder.enable_wallet_index() + return self.simulator.create_peer(builder) diff --git a/tests/nanocontracts/test_indexes2.py b/tests/nanocontracts/test_indexes2.py new file mode 100644 index 000000000..6cf58acad --- /dev/null +++ b/tests/nanocontracts/test_indexes2.py @@ -0,0 +1,71 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from hathor.conf.settings import HATHOR_TOKEN_UID +from hathor.nanocontracts import Blueprint, Context, public +from hathor.nanocontracts.types import ContractId, VertexId +from hathor.nanocontracts.utils import derive_child_token_id +from hathor.transaction import Transaction +from hathor.transaction.nc_execution_state import NCExecutionState +from hathor.transaction.util import get_deposit_amount +from tests.dag_builder.builder import TestDAGBuilder +from tests.nanocontracts.blueprints.unittest import BlueprintTestCase + + +class MyBlueprint(Blueprint): + @public(allow_deposit=True) + def initialize(self, ctx: Context, amount: int) -> None: + self.syscall.create_token(token_name='token a', token_symbol='TKA', amount=amount) + + +class TestIndexes2(BlueprintTestCase): + def setUp(self) -> None: + super().setUp() + + assert self.manager.tx_storage.indexes is not None + assert self.manager.tx_storage.indexes.tokens is not None + self.tokens_index = self.manager.tx_storage.indexes.tokens + + self.blueprint_id = self._register_blueprint_class(MyBlueprint) + self.dag_builder = TestDAGBuilder.from_manager(self.manager) + + def test_indexes_tx_affected_twice(self) -> None: + amount = 10000 + artifacts = self.dag_builder.build_from_str(f''' + blockchain genesis b[1..11] + b10 < dummy + + tx1.nc_id = "{self.blueprint_id.hex()}" + tx1.nc_method = initialize({amount}) + tx1.nc_deposit = 1000 HTR + tx1 <-- b11 # Confirming tx1 means it's affected in the consensus + + tx1.out[0] <<< tx2 # Spending tx1 means it's affected in the consensus for a second time + b11 < tx2 + ''') + artifacts.propagate_with(self.manager) + + tx1, = artifacts.get_typed_vertices(['tx1'], Transaction) + tka = derive_child_token_id(ContractId(VertexId(tx1.hash)), 'TKA') + + tka_token_info = self.tokens_index.get_token_info(tka) + htr_token_info = self.tokens_index.get_token_info(HATHOR_TOKEN_UID) + + assert tx1.get_metadata().nc_execution is NCExecutionState.SUCCESS + assert tka_token_info.get_total() 
== amount + assert htr_token_info.get_total() == ( + self._settings.GENESIS_TOKENS + + 11 * self._settings.INITIAL_TOKENS_PER_BLOCK + - get_deposit_amount(self._settings, amount) + ) diff --git a/tests/nanocontracts/test_invalid_value_assignment.py b/tests/nanocontracts/test_invalid_value_assignment.py new file mode 100644 index 000000000..9de4c2165 --- /dev/null +++ b/tests/nanocontracts/test_invalid_value_assignment.py @@ -0,0 +1,48 @@ +from hathor.conf import HathorSettings +from hathor.nanocontracts import Blueprint, Context, public +from hathor.nanocontracts.exception import NCFail +from hathor.nanocontracts.nc_types import make_nc_type_for_arg_type as make_nc_type +from hathor.nanocontracts.types import ContractId, TokenUid, VertexId +from tests.nanocontracts.blueprints.unittest import BlueprintTestCase + +settings = HathorSettings() + +INT_NC_TYPE = make_nc_type(int) + + +class MyBlueprint(Blueprint): + x: int + + @public + def initialize(self, ctx: Context) -> None: + self.x = 0 + + @public + def valid_assign(self, ctx: Context) -> None: + self.x = 1 + + @public + def invalid_assign(self, ctx: Context) -> None: + self.x = "2" # type: ignore[assignment] + + +class NCGetContractTestCase(BlueprintTestCase): + def setUp(self): + super().setUp() + self.token_uid = TokenUid(settings.HATHOR_TOKEN_UID) + self.nc_id = ContractId(VertexId(b'1' * 32)) + self.blueprint_id = self._register_blueprint_class(MyBlueprint) + self.runner.create_contract(self.nc_id, self.blueprint_id, self.create_context()) + self.nc_storage = self.runner.get_storage(self.nc_id) + + def test_get_readwrite_contract(self) -> None: + self.assertEqual(self.nc_storage.get_obj(b'x', INT_NC_TYPE), 0) + + self.runner.call_public_method(self.nc_id, 'valid_assign', self.create_context()) + self.assertEqual(self.nc_storage.get_obj(b'x', INT_NC_TYPE), 1) + + # XXX: the invalid_assign should fail as soon as put_obj is called, which makes this call fail with a NCFail, + # in the case where it doesn't fail 
immediately (and it's left to fail on commit), the exception raised + # will be a `TypeError` when commit is called. + with self.assertRaises(NCFail): + self.runner.call_public_method(self.nc_id, 'invalid_assign', self.create_context()) diff --git a/tests/nanocontracts/test_method_parser.py b/tests/nanocontracts/test_method_parser.py new file mode 100644 index 000000000..752832cbf --- /dev/null +++ b/tests/nanocontracts/test_method_parser.py @@ -0,0 +1,325 @@ +import json +from collections.abc import Callable +from typing import Any, Optional, TypeVar + +import pytest + +from hathor.nanocontracts.context import Context +from hathor.nanocontracts.exception import NCFail, NCSerializationArgTooLong +from hathor.nanocontracts.method import MAX_BYTES_SERIALIZED_ARG, Method +from hathor.nanocontracts.types import SignedData, public +from tests import unittest + +T = TypeVar('T') + + +class MyBlueprint: + @public + def initialize(self, ctx: Context, a: str, b: bytes, c: int, d: bool) -> None: + pass + + @public + def method_str(self, ctx: Context, x: str) -> None: + pass + + @public + def method_bytes(self, ctx: Context, x: bytes) -> None: + pass + + @public + def method_int(self, ctx: Context, x: int) -> None: + pass + + @public + def method_bool(self, ctx: Context, x: bool) -> None: + pass + + @public + def method_signed_str(self, ctx: Context, x: SignedData[str]) -> None: + pass + + @public + def method_with_optional(self, ctx: Context, x: Optional[str]) -> None: + pass + + @public + def method_with_tuple(self, ctx: Context, x: tuple[str, int, int]) -> None: + pass + + +class NCBlueprintTestCase(unittest.TestCase): + def _run_test(self, method: Callable[[Any, T], None], data: T) -> None: + parser = Method.from_callable(method) + self._run_test_parser(parser, data) + + def _run_test_parser(self, method_parser: Method, data: T) -> None: + # Then, check serialization and deserialization. 
+ args_in = (data,) + serialized_args_in = method_parser.serialize_args_bytes(args_in) + args_out = method_parser.deserialize_args_bytes(serialized_args_in) + self.assertEqual(args_in, args_out) + + # Also check that types match (they don't necessarily always match) + self.assertEqual(type(args_in), type(args_out)) + + def test_type_str_wrong_type(self) -> None: + with pytest.raises(NCFail) as e: + self._run_test(MyBlueprint.method_str, b'') + assert isinstance(e.value.__cause__, TypeError) + + def test_type_str_empty(self) -> None: + self._run_test(MyBlueprint.method_str, '') + + def test_type_str_small(self) -> None: + self._run_test(MyBlueprint.method_str, 'a') + + def test_type_str_long(self) -> None: + # there are 3 bytes of overhead when serializing + # 1 byte for the number of arguments in method_bytes + # 2 bytes for the length of the byte sequence that follows (because its length exceeds 63 bytes) + # since utf-8 encoding for 'a' doesn't change it, it works as if it was bytes + overhead = 3 + length = MAX_BYTES_SERIALIZED_ARG - overhead + self._run_test(MyBlueprint.method_str, 'a' * length) + + def test_type_str_too_long(self) -> None: + with self.assertRaises(NCSerializationArgTooLong): + length = MAX_BYTES_SERIALIZED_ARG + 1 + self._run_test(MyBlueprint.method_str, 'a' * length) + + def test_type_str_accents(self) -> None: + self._run_test(MyBlueprint.method_str, 'áéíóú') + + def test_type_bytes_empty(self) -> None: + self._run_test(MyBlueprint.method_bytes, b'') + + def test_type_bytes_small(self) -> None: + self._run_test(MyBlueprint.method_bytes, b'a') + + def test_type_bytes_long(self) -> None: + # there are 3 bytes of overhead when serializing + # 1 byte for the number of arguments in method_bytes + # 2 bytes for the length of the byte sequence that follows (because its length exceeds 63 bytes) + overhead = 3 + length = MAX_BYTES_SERIALIZED_ARG - overhead + self._run_test(MyBlueprint.method_bytes, b'a' * length) + + def 
test_type_bytes_too_long(self) -> None: + with self.assertRaises(NCSerializationArgTooLong): + length = MAX_BYTES_SERIALIZED_ARG + 1 + self._run_test(MyBlueprint.method_bytes, b'a' * length) + + def test_type_bytes_even_longer(self) -> None: + class Foo: + def bar(self, data: bytes) -> None: + pass + parser = Method.from_callable(Foo.bar) + parser.args._max_bytes = 2**32 # more than long enough to test a single bytes write + max_write_length = 2**16 - 3 + self._run_test_parser(parser, b'a' * max_write_length) # largest valid write + with self.assertRaises(NCSerializationArgTooLong): + self._run_test_parser(parser, b'a' * (max_write_length + 1)) # smallest invalid write + + def test_type_int_negative(self) -> None: + self._run_test(MyBlueprint.method_int, -100) + + def test_type_int_zero(self) -> None: + self._run_test(MyBlueprint.method_int, 0) + + def test_type_int_positive(self) -> None: + self._run_test(MyBlueprint.method_int, 100) + + def test_type_int_too_big(self) -> None: + with pytest.raises(NCFail) as e: + self._run_test(MyBlueprint.method_int, 2**223) + assert isinstance(e.value.__cause__, ValueError) + + def test_type_int_too_small(self) -> None: + with pytest.raises(NCFail) as e: + self._run_test(MyBlueprint.method_int, -2**223 - 1) + assert isinstance(e.value.__cause__, ValueError) + + def test_type_int_wrong_type(self) -> None: + with pytest.raises(NCFail) as e: + self._run_test(MyBlueprint.method_int, 1.) 
+ assert isinstance(e.value.__cause__, TypeError) + + def test_type_int(self) -> None: + class Foo: + def bar(self, i: int) -> None: + pass + + valid_values = [ + 0, + 1, + -1, + 2**31, + -2**31, + # edge valid values for 32 bytes of signed leb128 with 4 bytes + 2**223 - 1, + -2**223, + ] + for valid_value in valid_values: + self._run_test(Foo.bar, valid_value) + + invalid_values = [ + 2**223, + -2**223 - 1, + 2**223 + 1, + 2**224, + -2**223 - 2, + -2**224, + ] + for invalid_value in invalid_values: + with pytest.raises(NCFail) as e: + self._run_test(Foo.bar, invalid_value) + assert isinstance(e.value.__cause__, ValueError) + + def test_type_bool_false(self) -> None: + self._run_test(MyBlueprint.method_bool, False) + + def test_type_bool_true(self) -> None: + self._run_test(MyBlueprint.method_bool, True) + + def test_type_optional_str_none(self) -> None: + self._run_test(MyBlueprint.method_with_optional, None) + + def test_type_optional_str_empty(self) -> None: + self._run_test(MyBlueprint.method_with_optional, '') + + def test_type_optional_str(self) -> None: + self._run_test(MyBlueprint.method_with_optional, 'hathor') + + def test_type_tuple(self) -> None: + self._run_test(MyBlueprint.method_with_tuple, ('x', 1, 2)) + + def test_type_signed_str(self) -> None: + x: SignedData[str] = SignedData[str]('áéíóú', b'here-goes-the-signature') + self._run_test(MyBlueprint.method_signed_str, x) + + def test_basic_types(self) -> None: + parser = Method.from_callable(MyBlueprint.initialize) + + # Then, check serialization and deserialization. 
+ args_in = ('a', b'b', 1, True) + serialized_args_in = parser.serialize_args_bytes(args_in) + args_out = parser.deserialize_args_bytes(serialized_args_in) + self.assertEqual(args_in, args_out) + + def test_arg_parse_str(self) -> None: + parser = Method.from_callable(MyBlueprint.method_str) + + value = 'test' + args_json = json.loads(f'["{value}"]') + parsed_args = parser.args.json_to_value(args_json) + + # test that it parsed back the original value + self.assertEqual(len(parsed_args), 1) + self.assertEqual(parsed_args[0], value) + + # also test that it can generate the same JSON representation + args_json2 = parser.args.value_to_json((value,)) + self.assertEqual(args_json, args_json2) + + def test_arg_parse_bytes(self) -> None: + parser = Method.from_callable(MyBlueprint.method_bytes) + + value = b'\x01' + args_json = json.loads(f'["{value.hex()}"]') + parsed_args = parser.args.json_to_value(args_json) + + # test that it parsed back the original value + self.assertEqual(len(parsed_args), 1) + self.assertEqual(parsed_args[0], value) + + # also test that it can generate the same JSON representation + args_json2 = parser.args.value_to_json((value,)) + self.assertEqual(args_json, args_json2) + + def test_arg_parse_int(self) -> None: + parser = Method.from_callable(MyBlueprint.method_int) + + value = 1 + args_json = json.loads(f'[{value}]') + parsed_args = parser.args.json_to_value(args_json) + + # test that it parsed back the original value + self.assertEqual(len(parsed_args), 1) + self.assertEqual(parsed_args[0], value) + + # also test that it can generate the same JSON representation + args_json2 = parser.args.value_to_json((value,)) + self.assertEqual(args_json, args_json2) + + def test_arg_parse_bool(self) -> None: + parser = Method.from_callable(MyBlueprint.method_bool) + + args_json = json.loads('[false]') + parsed_args = parser.args.json_to_value(args_json) + + # test that it parsed back the original value + self.assertEqual(len(parsed_args), 1) + 
self.assertEqual(parsed_args[0], False) + + # also test that it can generate the same JSON representation + args_json2 = parser.args.value_to_json((False,)) + self.assertEqual(args_json, args_json2) + + def test_arg_parse_optional_none(self) -> None: + parser = Method.from_callable(MyBlueprint.method_with_optional) + + # If optional is None + args_json = json.loads('[null]') + parsed_args = parser.args.json_to_value(args_json) + + # test that it parsed back the original value + self.assertEqual(len(parsed_args), 1) + self.assertEqual(parsed_args[0], None) + + # also test that it can generate the same JSON representation + args_json2 = parser.args.value_to_json((None,)) + self.assertEqual(args_json, args_json2) + + def test_arg_parse_optional_some(self) -> None: + parser = Method.from_callable(MyBlueprint.method_with_optional) + + # If optional has str value + value = 'test' + args_json = json.loads(f'["{value}"]') + parsed_args = parser.args.json_to_value(args_json) + + # test that it parsed back the original value + self.assertEqual(len(parsed_args), 1) + self.assertEqual(parsed_args[0], value) + + # also test that it can generate the same JSON representation + args_json2 = parser.args.value_to_json(('test',)) + self.assertEqual(args_json, args_json2) + + def test_arg_parse_tuple(self): + parser = Method.from_callable(MyBlueprint.method_with_tuple) + + args_json = json.loads('[["test", 1, 2]]') + parsed_args = parser.args.json_to_value(args_json) + + # test that it parsed back the original value + self.assertEqual(len(parsed_args), 1) + self.assertEqual(parsed_args[0], ('test', 1, 2)) + + # also test that it can generate the same JSON representation + args_json2 = parser.args.value_to_json((('test', 1, 2),)) + self.assertEqual(args_json, args_json2) + + def test_arg_parse_signed_data(self) -> None: + parser = Method.from_callable(MyBlueprint.method_signed_str) + + args_json = json.loads('[["test", "1234"]]') + parsed_args = parser.args.json_to_value(args_json) + + 
# test that it parsed back the original value + self.assertEqual(len(parsed_args), 1) + self.assertEqual(parsed_args[0], SignedData[str]('test', bytes.fromhex('1234'))) + + # also test that it can generate the same JSON representation + args_json2 = parser.args.value_to_json((SignedData[str]('test', bytes.fromhex('1234')),)) + self.assertEqual(args_json, args_json2) diff --git a/tests/nanocontracts/test_nanocontract.py b/tests/nanocontracts/test_nanocontract.py new file mode 100644 index 000000000..1d4a5da8e --- /dev/null +++ b/tests/nanocontracts/test_nanocontract.py @@ -0,0 +1,482 @@ +from typing import Any + +import pytest +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric import ec + +from hathor.crypto.util import ( + decode_address, + get_address_b58_from_bytes, + get_address_from_public_key_bytes, + get_public_key_bytes_compressed, +) +from hathor.nanocontracts.blueprint import Blueprint +from hathor.nanocontracts.catalog import NCBlueprintCatalog +from hathor.nanocontracts.context import Context +from hathor.nanocontracts.exception import NCInvalidSignature +from hathor.nanocontracts.method import Method +from hathor.nanocontracts.nc_types import make_nc_type_for_arg_type as make_nc_type +from hathor.nanocontracts.types import ( + NCActionType, + NCDepositAction, + NCWithdrawalAction, + TokenUid, + VertexId, + public, + view, +) +from hathor.nanocontracts.utils import sign_openssl, sign_openssl_multisig +from hathor.transaction import Transaction, TxInput, TxOutput +from hathor.transaction.exceptions import ( + EqualVerifyFailed, + FinalStackInvalid, + InvalidScriptError, + MissingStackItems, + TooManySigOps, +) +from hathor.transaction.headers import NanoHeader, VertexHeaderId +from hathor.transaction.headers.nano_header import NanoHeaderAction +from hathor.transaction.scripts import P2PKH, HathorScript, Opcode +from hathor.transaction.validation_state import ValidationState +from 
hathor.verification.nano_header_verifier import MAX_NC_SCRIPT_SIGOPS_COUNT, MAX_NC_SCRIPT_SIZE +from hathor.wallet import KeyPair +from tests import unittest + +STR_NC_TYPE = make_nc_type(str) +INT_NC_TYPE = make_nc_type(int) + + +class MyBlueprint(Blueprint): + a: str + b: int + + @public + def initialize(self, ctx: Context, a: str, b: int) -> None: + self.a = a + self.b = b + + @public + def inc_b(self, ctx: Context) -> None: + self.b += 1 + + @view + def get_a(self) -> str: + return self.a + + @view + def get_b(self) -> int: + return self.b + + +class NCNanoContractTestCase(unittest.TestCase): + def setUp(self) -> None: + super().setUp() + + self.myblueprint_id = VertexId(b'x' * 32) + self.catalog = NCBlueprintCatalog({ + self.myblueprint_id: MyBlueprint + }) + self.nc_seqnum = 0 + + self.peer = self.create_peer('unittests') + self.peer.tx_storage.nc_catalog = self.catalog + + self.genesis = self.peer.tx_storage.get_all_genesis() + self.genesis_txs = [tx for tx in self.genesis if not tx.is_block] + + def _create_nc( + self, + nc_id: VertexId, + nc_method: str, + nc_args: list[Any], + *, + parents: list[bytes] | None = None, + timestamp: int = 0, + ) -> Transaction: + + if parents is None: + parents = [] + + tx_storage = self.peer.tx_storage + + nc = Transaction(weight=1, inputs=[], outputs=[], parents=parents, storage=tx_storage, timestamp=timestamp) + self._fill_nc(nc, nc_id, nc_method, nc_args) + return nc + + def _fill_nc(self, nc: Transaction, nc_id: VertexId, nc_method: str, nc_args: list[Any]) -> None: + method = getattr(MyBlueprint, nc_method, None) + if method is not None: + method_parser = Method.from_callable(method) + nc_args_bytes = method_parser.serialize_args_bytes(nc_args) + else: + nc_args_bytes = b'' + + key = KeyPair.create(b'123') + privkey = key.get_private_key(b'123') + + nano_header = NanoHeader( + tx=nc, + nc_seqnum=self.nc_seqnum, + nc_id=nc_id, + nc_method=nc_method, + nc_args_bytes=nc_args_bytes, + nc_address=b'', + nc_script=b'', + 
nc_actions=[], + ) + nc.headers.append(nano_header) + self.nc_seqnum += 1 + + sign_openssl(nano_header, privkey) + self.peer.cpu_mining_service.resolve(nc) + + def _get_nc(self, *, parents: list[bytes] | None = None, timestamp: int = 0) -> Transaction: + return self._create_nc(self.myblueprint_id, 'initialize', ['string', 1], parents=parents, timestamp=timestamp) + + def test_serialization(self) -> None: + nc = self._get_nc() + + nc_bytes = bytes(nc) + nc2 = Transaction.create_from_struct(nc_bytes, verbose=print) + self.assertEqual(nc_bytes, bytes(nc2)) + + nc2 = Transaction.create_from_struct(nc_bytes) + self.assertEqual(nc_bytes, bytes(nc2)) + + nc_header = nc.get_nano_header() + nc2_header = nc2.get_nano_header() + + self.assertEqual(nc_header.nc_seqnum, nc2_header.nc_seqnum) + self.assertEqual(nc_header.nc_id, nc2_header.nc_id) + self.assertEqual(nc_header.nc_method, nc2_header.nc_method) + self.assertEqual(nc_header.nc_args_bytes, nc2_header.nc_args_bytes) + self.assertEqual(nc_header.nc_actions, nc2_header.nc_actions) + self.assertEqual(nc_header.nc_address, nc2_header.nc_address) + self.assertEqual(nc_header.nc_script, nc2_header.nc_script) + + def test_serialization_skip_signature(self) -> None: + nc = self._get_nc() + nano_header = nc.get_nano_header() + sighash_bytes = nano_header.get_sighash_bytes() + deserialized, buf = NanoHeader.deserialize(Transaction(), VertexHeaderId.NANO_HEADER.value + sighash_bytes) + + assert len(buf) == 0 + assert deserialized.nc_seqnum == nano_header.nc_seqnum + assert deserialized.nc_id == nano_header.nc_id + assert deserialized.nc_method == nano_header.nc_method + assert deserialized.nc_args_bytes == nano_header.nc_args_bytes + assert deserialized.nc_actions == nano_header.nc_actions + assert deserialized.nc_address == nano_header.nc_address + assert deserialized.nc_script == b'' + + def test_verify_signature_success(self) -> None: + nc = self._get_nc() + nc.clear_sighash_cache() + 
self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc) + + def test_verify_signature_fails_nc_id(self) -> None: + nc = self._get_nc() + nano_header = nc.get_nano_header() + nano_header.nc_id = b'a' * 32 + nc.clear_sighash_cache() + with self.assertRaises(NCInvalidSignature): + self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc) + + def test_verify_signature_fails_nc_method(self) -> None: + nc = self._get_nc() + nano_header = nc.get_nano_header() + nano_header.nc_method = 'other_nc_method' + nc.clear_sighash_cache() + with self.assertRaises(NCInvalidSignature): + self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc) + + def test_verify_signature_fails_nc_args_bytes(self) -> None: + nc = self._get_nc() + nano_header = nc.get_nano_header() + nano_header.nc_args_bytes = b'other_nc_args_bytes' + nc.clear_sighash_cache() + with self.assertRaises(NCInvalidSignature): + self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc) + + def test_verify_signature_fails_invalid_nc_address(self) -> None: + nc = self._get_nc() + nano_header = nc.get_nano_header() + nano_header.nc_address = b'invalid-address' + nc.clear_sighash_cache() + with pytest.raises(NCInvalidSignature, match=f'invalid address: {nano_header.nc_address.hex()}'): + self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc) + + def test_verify_signature_fails_invalid_nc_script(self) -> None: + nc = self._get_nc() + nano_header = nc.get_nano_header() + nano_header.nc_script = b'invalid-script' + nc.clear_sighash_cache() + with pytest.raises(InvalidScriptError, match='Invalid Opcode'): + self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc) + + def test_verify_signature_fails_wrong_nc_address(self) -> None: + key = KeyPair.create(b'xyz') + privkey = key.get_private_key(b'xyz') + pubkey = privkey.public_key() + pubkey_bytes = get_public_key_bytes_compressed(pubkey) + + nc = self._get_nc() 
+ nano_header = nc.get_nano_header() + nano_header.nc_address = get_address_from_public_key_bytes(pubkey_bytes) + nc.clear_sighash_cache() + with pytest.raises(NCInvalidSignature) as e: + self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc) + assert isinstance(e.value.__cause__, EqualVerifyFailed) + + def test_verify_signature_fails_wrong_pubkey(self) -> None: + nc = self._get_nc() + nano_header = nc.get_nano_header() + + key = KeyPair.create(b'xyz') + privkey = key.get_private_key(b'xyz') + pubkey = privkey.public_key() + pubkey_bytes = get_public_key_bytes_compressed(pubkey) + nano_header.nc_address = get_address_from_public_key_bytes(pubkey_bytes) + + nc.clear_sighash_cache() + data = nc.get_sighash_all_data() + signature = privkey.sign(data, ec.ECDSA(hashes.SHA256())) + nano_header.nc_script = P2PKH.create_input_data(public_key_bytes=pubkey_bytes, signature=signature) + + # First, it's passing with the key from above + self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc) + + # We change the script to use a new pubkey, but with the same signature + key = KeyPair.create(b'wrong') + privkey = key.get_private_key(b'wrong') + pubkey = privkey.public_key() + pubkey_bytes = get_public_key_bytes_compressed(pubkey) + nano_header.nc_script = P2PKH.create_input_data(public_key_bytes=pubkey_bytes, signature=signature) + + with pytest.raises(NCInvalidSignature) as e: + self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc) + assert isinstance(e.value.__cause__, EqualVerifyFailed) + + def test_verify_signature_fails_wrong_signature(self) -> None: + nc = self._get_nc() + nano_header = nc.get_nano_header() + + key = KeyPair.create(b'xyz') + privkey = key.get_private_key(b'xyz') + pubkey = privkey.public_key() + pubkey_bytes = get_public_key_bytes_compressed(pubkey) + nano_header.nc_address = get_address_from_public_key_bytes(pubkey_bytes) + + nc.clear_sighash_cache() + data = nc.get_sighash_all_data() + 
signature = privkey.sign(data, ec.ECDSA(hashes.SHA256())) + nano_header.nc_script = P2PKH.create_input_data(public_key_bytes=pubkey_bytes, signature=signature) + + # First, it's passing with the key from above + self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc) + + # We change the script to use a new signature, but with the same pubkey + key = KeyPair.create(b'wrong') + privkey = key.get_private_key(b'wrong') + signature = privkey.sign(data, ec.ECDSA(hashes.SHA256())) + nano_header.nc_script = P2PKH.create_input_data(public_key_bytes=pubkey_bytes, signature=signature) + + with pytest.raises(NCInvalidSignature) as e: + self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc) + assert isinstance(e.value.__cause__, FinalStackInvalid) + assert 'Stack left with False value' in e.value.__cause__.args[0] + + def test_verify_signature_fails_nc_script_too_large(self) -> None: + nc = self._get_nc() + nano_header = nc.get_nano_header() + nano_header.nc_script = b'\x00' * (MAX_NC_SCRIPT_SIZE + 1) + + with pytest.raises(NCInvalidSignature, match='nc_script larger than max: 1025 > 1024'): + self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc) + + def test_verify_signature_fails_nc_script_too_many_sigops(self) -> None: + nc = self._get_nc() + nano_header = nc.get_nano_header() + + script = HathorScript() + for _ in range(MAX_NC_SCRIPT_SIGOPS_COUNT + 1): + script.addOpcode(Opcode.OP_CHECKSIG) + + nano_header.nc_script = script.data + + with pytest.raises(TooManySigOps, match='sigops count greater than max: 21 > 20'): + self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc) + + def test_verify_signature_multisig(self) -> None: + nc = self._get_nc() + nano_header = nc.get_nano_header() + + keys: list[tuple[ec.EllipticCurvePrivateKey, bytes]] = [] + for i in range(3): + password = i.to_bytes() + key = KeyPair.create(password) + privkey = key.get_private_key(password) + pubkey = 
privkey.public_key() + pubkey_bytes = get_public_key_bytes_compressed(pubkey) + keys.append((privkey, pubkey_bytes)) + + # 3 keys are accepted + redeem_pubkey_bytes = [x[1] for x in keys] + + # Test fails because requires 2 signatures, but only has 1 + nc.clear_sighash_cache() + sign_openssl_multisig( + nano_header, + required_count=2, + redeem_pubkey_bytes=redeem_pubkey_bytes, + sign_privkeys=[keys[0][0]], + ) + with pytest.raises(NCInvalidSignature) as e: + self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc) + assert isinstance(e.value.__cause__, MissingStackItems) + assert e.value.__cause__.args[0] == 'OP_CHECKMULTISIG: not enough signatures on the stack' + + # Test fails because requires 1 signature, but used wrong privkey + nc.clear_sighash_cache() + sign_openssl_multisig( + nano_header, + required_count=1, + redeem_pubkey_bytes=redeem_pubkey_bytes, + sign_privkeys=[KeyPair.create(b'invalid').get_private_key(b'invalid')], + ) + with pytest.raises(NCInvalidSignature) as e: + self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc) + assert isinstance(e.value.__cause__, FinalStackInvalid) + assert 'Stack left with False value' in e.value.__cause__.args[0] + + # Test passes because requires 2 signatures, and signed with 2 correct privkeys + nc.clear_sighash_cache() + sign_openssl_multisig( + nano_header, + required_count=2, + redeem_pubkey_bytes=redeem_pubkey_bytes, + sign_privkeys=[x[0] for x in keys[:2]], + ) + self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc) + + # Test fails because the address was changed + nc.clear_sighash_cache() + nano_header.nc_address = decode_address(self.peer.wallet.get_unused_address()) + with pytest.raises(NCInvalidSignature) as e: + self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc) + assert isinstance(e.value.__cause__, EqualVerifyFailed) + + def test_get_related_addresses(self) -> None: + nc = self._get_nc() + nano_header = 
nc.get_nano_header() + related_addresses = set(nc.get_related_addresses()) + address = get_address_b58_from_bytes(nano_header.nc_address) + self.assertIn(address, related_addresses) + + def create_nano(self) -> Transaction: + parents = [tx.hash for tx in self.genesis_txs] + timestamp = 1 + max(tx.timestamp for tx in self.genesis) + + nc = self._get_nc(parents=parents, timestamp=timestamp) + self.assertTrue(self.peer.on_new_tx(nc)) + return nc + + def test_dag_call_public_method(self) -> None: + nc = self.create_nano() + + parents = [tx.hash for tx in self.genesis_txs] + timestamp = 1 + max(tx.timestamp for tx in self.genesis) + + nc2 = self._create_nc( + nc_id=VertexId(nc.hash), + nc_method='inc_b', + nc_args=[], + parents=parents, + timestamp=timestamp, + ) + self.assertTrue(self.peer.on_new_tx(nc2)) + + def test_get_context(self) -> None: + tx_storage = self.peer.tx_storage + + # Incomplete transaction. It will be used as input of nc2. + outputs = [ + TxOutput(100, b'', 0), # HTR + TxOutput(200, b'', 1), # TOKEN A + TxOutput(300, b'', 2), # TOKEN B + ] + tokens = [b'token-a', b'token-b'] + tx = Transaction(outputs=outputs, tokens=tokens) + tx.parents = [tx.hash for tx in self.genesis_txs] + tx.get_metadata().validation = ValidationState.FULL + tx.update_hash() + tx.init_static_metadata_from_storage(self._settings, tx_storage) + tx_storage.save_transaction(tx) + + # Incomplete nanocontract transaction. 
+ inputs = [ + TxInput(tx.hash, 0, b''), + TxInput(tx.hash, 1, b''), + TxInput(tx.hash, 2, b''), + ] + outputs = [ + TxOutput(10, b'', 0), # HTR + TxOutput(250, b'', 1), # TOKEN A + TxOutput(300, b'', 2), # TOKEN B + ] + nc2 = Transaction( + weight=1, + inputs=inputs, + outputs=outputs, + tokens=tokens, + storage=tx_storage, + ) + nc2.headers.append(NanoHeader( + tx=nc2, + nc_seqnum=0, + nc_id=b'', + nc_method='', + nc_args_bytes=b'', + nc_address=b'\x00' * 25, + nc_script=b'', + nc_actions=[ + NanoHeaderAction( + type=NCActionType.WITHDRAWAL, + token_index=1, + amount=50, + ), + NanoHeaderAction( + type=NCActionType.DEPOSIT, + token_index=0, + amount=90, + ), + ], + )) + nc2.update_hash() + nc2_nano_header = nc2.get_nano_header() + context = nc2_nano_header.get_context() + self.assertEqual(2, len(context.actions)) + + action1 = context.get_single_action(TokenUid(b'token-a')) + assert isinstance(action1, NCWithdrawalAction) + self.assertEqual(action1.amount, 50) + + action2 = context.get_single_action(TokenUid(b'\0')) + assert isinstance(action2, NCDepositAction) + self.assertEqual(action2.amount, 90) + + def _to_frozenset(x: list[dict]) -> set[frozenset]: + return {frozenset(d.items()) for d in x} + + expected_json_actions = [{ + 'type': 'withdrawal', + 'token_uid': b'token-a'.hex(), + 'amount': 50, + }, { + 'type': 'deposit', + 'token_uid': b'\0'.hex(), + 'amount': 90, + }] + data = context.to_json() + json_actions = data['actions'] + self.assertEqual(_to_frozenset(json_actions), _to_frozenset(expected_json_actions)) diff --git a/tests/nanocontracts/test_nc_exec_logs.py b/tests/nanocontracts/test_nc_exec_logs.py new file mode 100644 index 000000000..e6b54a879 --- /dev/null +++ b/tests/nanocontracts/test_nc_exec_logs.py @@ -0,0 +1,607 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from textwrap import dedent +from unittest.mock import ANY + +from hathor.nanocontracts import Blueprint, Context, NCFail, public +from hathor.nanocontracts.nc_exec_logs import ( + NCCallBeginEntry, + NCCallEndEntry, + NCExecEntry, + NCLogConfig, + NCLogEntry, + NCLogLevel, +) +from hathor.nanocontracts.runner import CallType +from hathor.nanocontracts.types import ContractId, NCDepositAction, TokenUid, view +from hathor.transaction import Block, Transaction +from hathor.util import not_none +from tests import unittest +from tests.dag_builder.builder import TestDAGBuilder + +MY_BLUEPRINT1_ID: bytes = b'\x11' * 32 +MY_BLUEPRINT2_ID: bytes = b'\x22' * 32 + + +class MyBlueprint1(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + self.log.info('initialize() called on MyBlueprint1') + + @public + def log_levels(self, ctx: Context) -> None: + msg = 'log_levels() called' + self.log.debug(msg, test1=1) + self.log.info(msg, test2=2) + self.log.warn(msg, test3=3) + self.log.error(msg, test4=4) + + @public + def fail(self, ctx: Context) -> None: + self.log.warn('fail() called') + raise NCFail('some fail') + + @public + def value_error(self, ctx: Context) -> None: + self.log.warn('value_error() called') + raise ValueError('some value error') + + @public(allow_deposit=True) + def call_another_public(self, ctx: Context, contract_id: ContractId) -> None: + self.log.debug('call_another_public() called on MyBlueprint1', contract_id=contract_id) + actions = [NCDepositAction(token_uid=TokenUid(b'\x00'), amount=5)] + result1 = 
self.syscall.call_public_method(contract_id, 'sum', actions, 1, 2) + result2 = self.syscall.call_view_method(contract_id, 'hello_world') + self.log.debug('results on MyBlueprint1', result1=result1, result2=result2) + + +class MyBlueprint2(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + self.log.info('initialize() called on MyBlueprint2') + + @public(allow_deposit=True) + def sum(self, ctx: Context, a: int, b: int) -> int: + self.log.debug('sum() called on MyBlueprint2', a=a, b=b) + return a + b + + @view + def hello_world(self) -> str: + self.log.debug('hello_world() called on MyBlueprint2') + return 'hello world' + + +class BaseNCExecLogs(unittest.TestCase): + __test__ = False + + def _get_initialize_entries(self, tx: Transaction) -> list[NCCallBeginEntry | NCLogEntry | NCCallEndEntry]: + assert tx.is_nano_contract() + nano_header = tx.get_nano_header() + assert self.manager.tx_storage.nc_catalog is not None + blueprint_class = self.manager.tx_storage.nc_catalog.blueprints[nano_header.nc_id] + return [ + NCCallBeginEntry.construct( + nc_id=tx.hash, + call_type=CallType.PUBLIC, + method_name='initialize', + timestamp=ANY, + actions=[], + ), + NCLogEntry.construct( + level=NCLogLevel.INFO, + message=f'initialize() called on {blueprint_class.__name__}', + timestamp=ANY, + ), + NCCallEndEntry.construct(timestamp=ANY), + ] + + def _prepare(self, nc_log_config: NCLogConfig = NCLogConfig.ALL) -> None: + settings = self._settings._replace( + REWARD_SPEND_MIN_BLOCKS=1, # to make tests quicker + ) + artifacts = self.get_builder() \ + .set_settings(settings) \ + .set_nc_log_config(nc_log_config) \ + .build() + + self.nc_log_storage = not_none(artifacts.consensus.block_algorithm_factory.nc_log_storage) + self.manager = artifacts.manager + assert self.manager.tx_storage.nc_catalog is not None + self.manager.tx_storage.nc_catalog.blueprints = { + MY_BLUEPRINT1_ID: MyBlueprint1, + MY_BLUEPRINT2_ID: MyBlueprint2, + } + self.dag_builder = 
TestDAGBuilder.from_manager(self.manager) + + +class TestNCExecLogs(BaseNCExecLogs): + __test__ = True + + def test_config_all(self) -> None: + self._prepare(nc_log_config=NCLogConfig.ALL) + artifacts = self.dag_builder.build_from_str(f""" + blockchain genesis b[1..2] + b1 < dummy + + nc1.nc_id = "{MY_BLUEPRINT1_ID.hex()}" + nc1.nc_method = initialize() + + nc2.nc_id = nc1 + nc2.nc_method = fail() + + nc3.nc_id = nc1 + nc3.nc_method = value_error() + + nc1 <-- nc2 <-- nc3 <-- b2 + """) + artifacts.propagate_with(self.manager) + + nc1, nc2, nc3 = artifacts.get_typed_vertices(['nc1', 'nc2', 'nc3'], Transaction) + b2 = artifacts.get_typed_vertex('b2', Block) + assert nc1.is_nano_contract() + assert nc2.is_nano_contract() + assert nc3.is_nano_contract() + + assert not_none(self.nc_log_storage.get_logs(nc1.hash)).entries == { + b2.hash: [NCExecEntry( + logs=self._get_initialize_entries(nc1), + )], + } + + assert len(not_none(self.nc_log_storage.get_logs(nc2.hash)).entries[b2.hash]) > 0 + assert len(not_none(self.nc_log_storage.get_logs(nc3.hash)).entries[b2.hash]) > 0 + + def test_config_none(self) -> None: + self._prepare(nc_log_config=NCLogConfig.NONE) + artifacts = self.dag_builder.build_from_str(f""" + blockchain genesis b[1..2] + b1 < dummy + + nc1.nc_id = "{MY_BLUEPRINT1_ID.hex()}" + nc1.nc_method = initialize() + + nc2.nc_id = nc1 + nc2.nc_method = fail() + + nc3.nc_id = nc1 + nc3.nc_method = value_error() + + nc1 <-- nc2 <-- nc3 <-- b2 + """) + artifacts.propagate_with(self.manager) + + nc1, nc2, nc3 = artifacts.get_typed_vertices(['nc1', 'nc2', 'nc3'], Transaction) + assert nc1.is_nano_contract() + assert nc2.is_nano_contract() + assert nc3.is_nano_contract() + + assert self.nc_log_storage.get_logs(nc1.hash) is None + assert self.nc_log_storage.get_logs(nc2.hash) is None + assert self.nc_log_storage.get_logs(nc3.hash) is None + + def test_config_failed(self) -> None: + self._prepare(nc_log_config=NCLogConfig.FAILED) + artifacts = 
self.dag_builder.build_from_str(f""" + blockchain genesis b[1..2] + b1 < dummy + + nc1.nc_id = "{MY_BLUEPRINT1_ID.hex()}" + nc1.nc_method = initialize() + + nc2.nc_id = nc1 + nc2.nc_method = fail() + + nc3.nc_id = nc1 + nc3.nc_method = value_error() + + nc1 <-- nc2 <-- nc3 <-- b2 + """) + artifacts.propagate_with(self.manager) + + nc1, nc2, nc3 = artifacts.get_typed_vertices(['nc1', 'nc2', 'nc3'], Transaction) + b2 = artifacts.get_typed_vertex('b2', Block) + assert nc1.is_nano_contract() + assert nc2.is_nano_contract() + assert nc3.is_nano_contract() + + assert self.nc_log_storage.get_logs(nc1.hash) is None + assert len(not_none(self.nc_log_storage.get_logs(nc2.hash)).entries[b2.hash]) > 0 + assert len(not_none(self.nc_log_storage.get_logs(nc3.hash)).entries[b2.hash]) > 0 + + def test_config_failed_unhandled(self) -> None: + self._prepare(nc_log_config=NCLogConfig.FAILED_UNHANDLED) + artifacts = self.dag_builder.build_from_str(f""" + blockchain genesis b[1..2] + b1 < dummy + + nc1.nc_id = "{MY_BLUEPRINT1_ID.hex()}" + nc1.nc_method = initialize() + + nc2.nc_id = nc1 + nc2.nc_method = fail() + + nc3.nc_id = nc1 + nc3.nc_method = value_error() + + nc1 <-- nc2 <-- nc3 <-- b2 + """) + artifacts.propagate_with(self.manager) + + nc1, nc2, nc3 = artifacts.get_typed_vertices(['nc1', 'nc2', 'nc3'], Transaction) + b2 = artifacts.get_typed_vertex('b2', Block) + assert nc1.is_nano_contract() + assert nc2.is_nano_contract() + assert nc3.is_nano_contract() + + assert self.nc_log_storage.get_logs(nc1.hash) is None + assert self.nc_log_storage.get_logs(nc2.hash) is None + assert len(not_none(self.nc_log_storage.get_logs(nc3.hash)).entries[b2.hash]) > 0 + + def test_log_levels_and_key_values(self) -> None: + self._prepare() + artifacts = self.dag_builder.build_from_str(f""" + blockchain genesis b[1..2] + b1 < dummy + + nc1.nc_id = "{MY_BLUEPRINT1_ID.hex()}" + nc1.nc_method = initialize() + + nc2.nc_id = nc1 + nc2.nc_method = log_levels() + + nc1 <-- nc2 <-- b2 + """) + 
artifacts.propagate_with(self.manager) + + nc1, nc2 = artifacts.get_typed_vertices(['nc1', 'nc2'], Transaction) + b2 = artifacts.get_typed_vertex('b2', Block) + assert nc1.is_nano_contract() + assert nc2.is_nano_contract() + + assert not_none(self.nc_log_storage.get_logs(nc1.hash)).entries == { + b2.hash: [NCExecEntry( + logs=self._get_initialize_entries(nc1), + )], + } + + assert not_none(self.nc_log_storage.get_logs(nc2.hash)).entries == { + b2.hash: [NCExecEntry( + logs=[ + NCCallBeginEntry.construct( + nc_id=nc1.hash, + call_type=CallType.PUBLIC, + method_name='log_levels', + timestamp=ANY, + actions=[], + ), + NCLogEntry.construct( + level=NCLogLevel.DEBUG, + message='log_levels() called', + key_values=dict(test1='1'), + timestamp=ANY, + ), + NCLogEntry.construct( + level=NCLogLevel.INFO, + message='log_levels() called', + key_values=dict(test2='2'), + timestamp=ANY, + ), + NCLogEntry.construct( + level=NCLogLevel.WARN, + message='log_levels() called', + key_values=dict(test3='3'), + timestamp=ANY, + ), + NCLogEntry.construct( + level=NCLogLevel.ERROR, + message='log_levels() called', + key_values=dict(test4='4'), + timestamp=ANY, + ), + NCCallEndEntry.construct(timestamp=ANY), + ], + )], + } + + # test log level filter + assert not_none(self.nc_log_storage.get_logs(nc2.hash, log_level=NCLogLevel.WARN)).entries == { + b2.hash: [NCExecEntry( + logs=[ + NCLogEntry.construct( + level=NCLogLevel.WARN, + message='log_levels() called', + key_values=dict(test3='3'), + timestamp=ANY, + ), + NCLogEntry.construct( + level=NCLogLevel.ERROR, + message='log_levels() called', + key_values=dict(test4='4'), + timestamp=ANY, + ), + ], + )], + } + + def test_nc_fail(self) -> None: + self._prepare() + artifacts = self.dag_builder.build_from_str(f""" + blockchain genesis b[1..2] + b1 < dummy + + nc1.nc_id = "{MY_BLUEPRINT1_ID.hex()}" + nc1.nc_method = initialize() + + nc2.nc_id = nc1 + nc2.nc_method = fail() + + nc1 <-- nc2 <-- b2 + """) + artifacts.propagate_with(self.manager) + 
+ nc1, nc2 = artifacts.get_typed_vertices(['nc1', 'nc2'], Transaction) + b2 = artifacts.get_typed_vertex('b2', Block) + assert nc1.is_nano_contract() + assert nc2.is_nano_contract() + + assert not_none(self.nc_log_storage.get_logs(nc1.hash)).entries == { + b2.hash: [NCExecEntry( + logs=self._get_initialize_entries(nc1), + )], + } + + result = not_none(self.nc_log_storage.get_logs(nc2.hash)) + assert result.entries == { + b2.hash: [NCExecEntry.construct( + error_traceback=ANY, + logs=[ + NCCallBeginEntry.construct( + nc_id=nc1.hash, + call_type=CallType.PUBLIC, + method_name='fail', + timestamp=ANY, + actions=[], + ), + NCLogEntry.construct(level=NCLogLevel.WARN, message='fail() called', timestamp=ANY), + ], + )], + } + + error_tb = result.entries[b2.hash][0].error_traceback + assert error_tb is not None + assert error_tb.startswith('Traceback (most recent call last):') + assert error_tb.endswith('hathor.nanocontracts.exception.NCFail: some fail\n') + + def test_value_error(self) -> None: + self._prepare() + artifacts = self.dag_builder.build_from_str(f""" + blockchain genesis b[1..2] + b1 < dummy + + nc1.nc_id = "{MY_BLUEPRINT1_ID.hex()}" + nc1.nc_method = initialize() + + nc2.nc_id = nc1 + nc2.nc_method = value_error() + + nc1 <-- nc2 <-- b2 + """) + artifacts.propagate_with(self.manager) + + nc1, nc2 = artifacts.get_typed_vertices(['nc1', 'nc2'], Transaction) + b2 = artifacts.get_typed_vertex('b2', Block) + assert nc1.is_nano_contract() + assert nc2.is_nano_contract() + + assert not_none(self.nc_log_storage.get_logs(nc1.hash)).entries == { + b2.hash: [NCExecEntry( + logs=self._get_initialize_entries(nc1), + )], + } + + result = not_none(self.nc_log_storage.get_logs(nc2.hash)) + assert result.entries == { + b2.hash: [NCExecEntry.construct( + error_traceback=ANY, + logs=[ + NCCallBeginEntry.construct( + nc_id=nc1.hash, + call_type=CallType.PUBLIC, + method_name='value_error', + timestamp=ANY, + actions=[], + ), + NCLogEntry.construct(level=NCLogLevel.WARN, 
message='value_error() called', timestamp=ANY), + ], + )], + } + + error_tb = result.entries[b2.hash][0].error_traceback + assert error_tb is not None + assert error_tb.startswith('Traceback (most recent call last):') + assert dedent(""" + ValueError: some value error\n + The above exception was the direct cause of the following exception:\n + Traceback (most recent call last): + """) in error_tb + assert error_tb.endswith('hathor.nanocontracts.exception.NCFail\n') + + def test_reexecution_on_reorgs(self) -> None: + self._prepare() + artifacts = self.dag_builder.build_from_str(f""" + blockchain genesis b[1..4] + blockchain b1 a[2..3] + b1 < dummy + b2 < a2 < a3 < b3 < b4 + + nc1.nc_id = "{MY_BLUEPRINT1_ID.hex()}" + nc1.nc_method = initialize() + + nc1 <-- b2 + nc1 <-- a2 + """) + + nc1 = artifacts.get_typed_vertex('nc1', Transaction) + b2, a2 = artifacts.get_typed_vertices(['b2', 'a2'], Block) + assert nc1.is_nano_contract() + + # 2 reorgs happen, so nc1.initialize() gets executed 3 times, once in block a2 and twice in block b2 + artifacts.propagate_with(self.manager, up_to='b2') + assert nc1.get_metadata().first_block == b2.hash + assert b2.get_metadata().voided_by is None + assert not_none(self.nc_log_storage.get_logs(nc1.hash)).entries == { + b2.hash: [NCExecEntry( + logs=self._get_initialize_entries(nc1), + )], + } + + artifacts.propagate_with(self.manager, up_to='a3') + assert nc1.get_metadata().first_block == a2.hash + assert b2.get_metadata().voided_by == {b2.hash} + assert a2.get_metadata().voided_by is None + assert not_none(self.nc_log_storage.get_logs(nc1.hash)).entries == { + b2.hash: [NCExecEntry( + logs=self._get_initialize_entries(nc1), + )], + a2.hash: [NCExecEntry( + logs=self._get_initialize_entries(nc1), + )], + } + + artifacts.propagate_with(self.manager) + assert nc1.get_metadata().first_block == b2.hash + assert b2.get_metadata().voided_by is None + assert a2.get_metadata().voided_by == {a2.hash} + assert 
not_none(self.nc_log_storage.get_logs(nc1.hash)).entries == { + b2.hash: [ + NCExecEntry( + logs=self._get_initialize_entries(nc1), + ), + NCExecEntry( + logs=self._get_initialize_entries(nc1), + ), + ], + a2.hash: [NCExecEntry( + logs=self._get_initialize_entries(nc1), + )], + } + + def test_call_another_contract_public(self) -> None: + self._prepare() + artifacts = self.dag_builder.build_from_str(f""" + blockchain genesis b[1..2] + b1 < dummy + + nc1.nc_id = "{MY_BLUEPRINT1_ID.hex()}" + nc1.nc_method = initialize() + + nc2.nc_id = "{MY_BLUEPRINT2_ID.hex()}" + nc2.nc_method = initialize() + + nc3.nc_id = nc1 + nc3.nc_deposit = 10 HTR + nc3.nc_method = call_another_public(`nc2`) + + nc1.out[0] <<< nc2 + nc2.out[0] <<< nc3 + nc3 <-- b2 + """) + artifacts.propagate_with(self.manager) + + nc1, nc2, nc3 = artifacts.get_typed_vertices(['nc1', 'nc2', 'nc3'], Transaction) + b2 = artifacts.get_typed_vertex('b2', Block) + assert nc1.is_nano_contract() + assert nc2.is_nano_contract() + assert nc3.is_nano_contract() + + assert not_none(self.nc_log_storage.get_logs(nc1.hash)).entries == { + b2.hash: [NCExecEntry( + logs=self._get_initialize_entries(nc1), + )], + } + assert not_none(self.nc_log_storage.get_logs(nc2.hash)).entries == { + b2.hash: [NCExecEntry( + logs=self._get_initialize_entries(nc2), + )], + } + + assert not_none(self.nc_log_storage.get_logs(nc3.hash)).entries == { + b2.hash: [NCExecEntry( + error_traceback=None, + logs=[ + NCCallBeginEntry.construct( + nc_id=nc1.hash, + call_type=CallType.PUBLIC, + method_name='call_another_public', + str_args=str((nc2.hash,)), + timestamp=ANY, + actions=[ + dict( + type='deposit', + token_uid='00', + amount=10, + ) + ], + ), + NCLogEntry.construct( + level=NCLogLevel.DEBUG, + message='call_another_public() called on MyBlueprint1', + key_values=dict(contract_id=nc2.hash_hex), + timestamp=ANY, + ), + NCCallBeginEntry.construct( + nc_id=nc2.hash, + call_type=CallType.PUBLIC, + method_name='sum', + str_args=str((1, 2)), + 
timestamp=ANY, + actions=[ + dict( + type='deposit', + token_uid='00', + amount=5, + ) + ], + ), + NCLogEntry.construct( + level=NCLogLevel.DEBUG, + message='sum() called on MyBlueprint2', + key_values=dict(a='1', b='2'), + timestamp=ANY + ), + NCCallEndEntry.construct(timestamp=ANY), + NCCallBeginEntry.construct( + nc_id=nc2.hash, + call_type=CallType.VIEW, + method_name='hello_world', + timestamp=ANY, + actions=None, + ), + NCLogEntry.construct( + level=NCLogLevel.DEBUG, + message='hello_world() called on MyBlueprint2', + timestamp=ANY, + ), + NCCallEndEntry.construct(timestamp=ANY), + NCLogEntry.construct( + level=NCLogLevel.DEBUG, + message='results on MyBlueprint1', + key_values=dict(result1='3', result2='hello world'), + timestamp=ANY + ), + NCCallEndEntry.construct(timestamp=ANY), + ], + )], + } diff --git a/tests/nanocontracts/test_patricia_trie.py b/tests/nanocontracts/test_patricia_trie.py new file mode 100644 index 000000000..a6b10049d --- /dev/null +++ b/tests/nanocontracts/test_patricia_trie.py @@ -0,0 +1,232 @@ +import hashlib +import tempfile +from math import log +from typing import Optional + +from hathor.nanocontracts.storage.backends import MemoryNodeTrieStore, RocksDBNodeTrieStore +from hathor.nanocontracts.storage.patricia_trie import Node, PatriciaTrie +from hathor.storage.rocksdb_storage import RocksDBStorage +from tests import unittest + + +def export_trie_outline(trie: PatriciaTrie, *, node: Optional[Node] = None) -> tuple[bytes, Optional[bytes], dict]: + """Return the tree outline for testing purposes. + + The returned format is (key, value, list[children]) where each child has the same format. 
+ """ + if node is None: + node = trie.root + + d = {} + for k, child_id in node.children.items(): + child = trie.get_node(child_id) + d[trie._decode_key(k)] = export_trie_outline(trie, node=child) + return (trie._decode_key(node.key), node.content, d) + + +class PatriciaTrieTestCase(unittest.TestCase): + __test__ = False + + def create_trie(self) -> PatriciaTrie: + raise NotImplementedError + + def test_empty_key(self) -> None: + trie = self.create_trie() + with self.assertRaises(KeyError): + trie.get(b'') + + def test_empty_trie(self) -> None: + trie = self.create_trie() + with self.assertRaises(KeyError): + trie.get(b'my-key') + + def test_single_key(self) -> None: + trie = self.create_trie() + key = b'my-key' + + with self.assertRaises(KeyError): + trie.get(key) + + trie.update(key, b'1') + trie.commit() + self.assertEqual(trie.get(key), b'1') + root1_id = trie.root.id + + trie.update(key, b'1') + trie.commit() + self.assertEqual(trie.get(key), b'1') + self.assertEqual(root1_id, trie.root.id) + + trie.update(key, b'2') + trie.commit() + self.assertEqual(trie.get(key), b'2') + + self.assertNotEqual(root1_id, trie.root.id) + self.assertEqual(trie.get(key, root_id=root1_id), b'1') + + n_nodes = len(trie._db) + trie.update(key, b'1') + trie.commit() + self.assertEqual(trie.get(key), b'1') + self.assertEqual(root1_id, trie.root.id) + self.assertEqual(n_nodes, len(trie._db)) + + trie.print_dfs() + + self.assertEqual( + export_trie_outline(trie), + (b'', None, { + key: (key, b'1', {}), + }) + ) + + def test_independent_keys(self) -> None: + trie = self.create_trie() + + key1 = b'\x00abcde' + key2 = b'\x10fghijklmn' + + trie.update(key1, b'1') + trie.commit() + self.assertEqual(trie.get(key1), b'1') + + trie.update(key2, b'2') + trie.commit() + self.assertEqual(trie.get(key2), b'2') + + self.assertEqual(len(trie.root.children), 2) + trie.print_dfs() + + self.assertEqual( + export_trie_outline(trie), + (b'', None, { + key1: (key1, b'1', {}), + key2: (key2, b'2', {}), + 
}) + ) + + def test_simple_chain(self) -> None: + trie = self.create_trie() + + data = { + b'a': b'1', + b'abcd': b'2', + b'ab': b'3', + b'abcdefg': b'4', + b'abcdefh': b'5', + } + for k, v in data.items(): + trie.update(k, v) + # print('!! UPDATE', k) + # print() + # trie.print_dfs() + # print() + # print() + # print() + # print() + + for k, v in data.items(): + self.assertEqual(trie.get(k), v) + trie.commit() + + self.assertEqual( + export_trie_outline(trie), + (b'', None, { + b'a': (b'a', b'1', { + b'b': (b'ab', b'3', { + b'cd': (b'abcd', b'2', { + b'ef`': (b'abcdef`', None, { + b'p': (b'abcdefg', b'4', {}), + b'\x80': (b'abcdefh', b'5', {}), + }), + }), + }), + }), + }) + ) + + def test_random_data(self) -> None: + trie = self.create_trie() + + data = {} + for v_int in range(20_000): + v = str(v_int).encode('ascii') + k = hashlib.sha1(v).digest() + data[k] = v + trie.update(k, v) + + for k, v in data.items(): + self.assertEqual(trie.get(k), v) + trie.commit() + + max_children = max(len(x.children) for x, _, _ in trie.iter_dfs()) + max_height = max(h for _, h, _ in trie.iter_dfs()) + + print('max_children', max_children) + print('max_height', max_height) + print('n_nodes', len(trie._db)) + + self.assertLessEqual(max_children, 16) + self.assertLessEqual(max_height, 2*log(len(data), 16)) + + def test_commit(self) -> None: + trie = self.create_trie() + + data = {} + for v_int in range(20_000): + v = str(v_int).encode('ascii') + k = hashlib.sha1(v).digest() + data[k] = v + trie.update(k, v) + trie.commit() + root1_id = trie.root.id + + key1, value1 = next(iter(data.items())) + trie.update(key1, value1 + b'1') + self.assertTrue(trie.is_dirty()) + trie.commit() + self.assertFalse(trie.is_dirty()) + root2_id = trie.root.id + + self.assertNotEqual(root1_id, root2_id) + self.assertEqual(trie.get(key1, root_id=root1_id), value1) + self.assertEqual(trie.get(key1, root_id=root2_id), value1 + b'1') + + def test_multiple_keys_same_value(self) -> None: + trie = 
self.create_trie() + data = { + b'a': b'1', + b'abcd': b'1', + b'ab': b'1', + b'abcdefg': b'1', + b'abcdefh': b'1', + b'\x01xyz': b'1', + } + for k, v in data.items(): + trie.update(k, v) + trie.commit() + + for k, v in data.items(): + self.assertEqual(trie.get(k), v) + + +class MemoryPatriciaTrieTest(PatriciaTrieTestCase): + __test__ = True + + def create_trie(self) -> PatriciaTrie: + store = MemoryNodeTrieStore() + return PatriciaTrie(store) + + +class RocksDBPatriciaTrieTest(PatriciaTrieTestCase): + __test__ = True + + def setUp(self) -> None: + super().setUp() + directory = tempfile.mkdtemp() + self.tmpdirs.append(directory) + self.rocksdb_storage = RocksDBStorage(path=directory) + + def create_trie(self) -> PatriciaTrie: + store = RocksDBNodeTrieStore(self.rocksdb_storage) + return PatriciaTrie(store) diff --git a/tests/nanocontracts/test_reentrancy.py b/tests/nanocontracts/test_reentrancy.py new file mode 100644 index 000000000..c5f94df81 --- /dev/null +++ b/tests/nanocontracts/test_reentrancy.py @@ -0,0 +1,214 @@ +from hathor.nanocontracts import Blueprint, Context, NCFail, public +from hathor.nanocontracts.types import Amount, ContractId, NCAction, NCDepositAction, TokenUid +from tests.nanocontracts.blueprints.unittest import BlueprintTestCase + +HTR_TOKEN_UID = TokenUid(b'\0') + + +class InsufficientBalance(NCFail): + pass + + +class MyBlueprint(Blueprint): + # I used dict[bytes, int] for two reasons: + # 1. `bytes` works for both Address and ContractId + # 2. 
int allows negative values + balances: dict[bytes, int] + + @public + def initialize(self, ctx: Context) -> None: + pass + + @public(allow_deposit=True) + def deposit(self, ctx: Context) -> None: + address = ctx.address + action = ctx.get_single_action(HTR_TOKEN_UID) + assert isinstance(action, NCDepositAction) + amount = action.amount + + if address not in self.balances: + self.balances[address] = amount + else: + self.balances[address] += amount + + @public + def transfer_to(self, ctx: Context, amount: Amount, contract: ContractId, method: str) -> None: + address = ctx.address + if amount > self.balances.get(address, 0): + raise InsufficientBalance('insufficient balance') + + actions: list[NCAction] = [NCDepositAction(token_uid=HTR_TOKEN_UID, amount=amount)] + # This contract is vulnerable to reentrancy attack because it is transfering before reducing the balance. + # Another issue is that it doesn't assert self.balances[address] >= 0. + self.syscall.call_public_method(contract, method, actions=actions) + self.balances[address] -= amount + + @public + def fixed_transfer_to(self, ctx: Context, amount: Amount, contract: ContractId, method: str) -> None: + address = ctx.address + if amount > self.balances.get(address, 0): + raise InsufficientBalance('insufficient balance') + + actions: list[NCAction] = [NCDepositAction(token_uid=HTR_TOKEN_UID, amount=amount)] + # This contract is not vulnerable to reentrancy attack. The only difference relies on the moment the balance is + # updated. 
+ self.balances[address] -= amount + self.syscall.call_public_method(contract, method, actions=actions) + + +class AttackerBlueprint(Blueprint): + target: ContractId + amount: Amount + n_calls: int + counter: int + + @public(allow_deposit=True) + def initialize(self, ctx: Context, target: ContractId, n_calls: int) -> None: + self.target = target + self.n_calls = n_calls + self.counter = 0 + + action = ctx.get_single_action(HTR_TOKEN_UID) + assert isinstance(action, NCDepositAction) + self.amount = Amount(action.amount) + + actions: list[NCAction] = [NCDepositAction(token_uid=HTR_TOKEN_UID, amount=self.amount)] + self.syscall.call_public_method(target, 'deposit', actions=actions) + + @public(allow_deposit=True) + def nop(self, ctx: Context) -> None: + pass + + @public(allow_deposit=True) + def attack(self, ctx: Context) -> None: + self._run_attack('transfer_to') + + @public(allow_deposit=True) + def attack_fail(self, ctx: Context) -> None: + self._run_attack('fixed_transfer_to') + + def _run_attack(self, method: str) -> None: + if self.counter >= self.n_calls: + return + + self.counter += 1 + self.syscall.call_public_method( + self.target, + method, + actions=[], + amount=self.amount, + contract=self.syscall.get_contract_id(), + method='attack', + ) + + +class NCReentrancyTestCase(BlueprintTestCase): + def setUp(self) -> None: + super().setUp() + + self.target_blueprint_id = self.gen_random_blueprint_id() + self.attacker_blueprint_id = self.gen_random_blueprint_id() + + self.nc_catalog.blueprints[self.target_blueprint_id] = MyBlueprint + self.nc_catalog.blueprints[self.attacker_blueprint_id] = AttackerBlueprint + + self.nc_target_id = self.gen_random_contract_id() + self.nc_attacker_id = self.gen_random_contract_id() + + tx = self.get_genesis_tx() + + self.address1 = self.gen_random_address() + self.address2 = self.gen_random_address() + + ctx = Context([], tx, self.address1, timestamp=0) + self.runner.create_contract(self.nc_target_id, self.target_blueprint_id, 
ctx) + + self.n_calls = 15 + ctx = Context([NCDepositAction(token_uid=HTR_TOKEN_UID, amount=50)], tx, self.address2, timestamp=0) + self.runner.create_contract( + self.nc_attacker_id, + self.attacker_blueprint_id, + ctx, + target=self.nc_target_id, + n_calls=self.n_calls, + ) + + # Address1 deposits 1.00 HTR + actions: list[NCAction] = [NCDepositAction(token_uid=HTR_TOKEN_UID, amount=1_00)] + ctx = Context(actions, tx, self.address1, timestamp=0) + self.runner.call_public_method(self.nc_target_id, 'deposit', ctx) + + # Address2 deposits 100.00 HTR + actions = [NCDepositAction(token_uid=HTR_TOKEN_UID, amount=100_00)] + ctx = Context(actions, tx, self.address2, timestamp=0) + self.runner.call_public_method(self.nc_target_id, 'deposit', ctx) + + self.target_storage = self.runner.get_storage(self.nc_target_id) + self.attacker_storage = self.runner.get_storage(self.nc_attacker_id) + + assert self.target_storage.get_balance(HTR_TOKEN_UID).value == 10_150 + assert self.attacker_storage.get_balance(HTR_TOKEN_UID).value == 0 + + def test_basics(self) -> None: + tx = self.get_genesis_tx() + + # Address1 sends 0.30 HTR to attacker contract. + ctx = Context([], tx, self.address1, timestamp=0) + self.runner.call_public_method( + self.nc_target_id, + 'transfer_to', + ctx, + amount=30, + contract=self.nc_attacker_id, + method='nop', + ) + + assert self.target_storage.get_balance(HTR_TOKEN_UID).value == 10_150 - 30 + assert self.attacker_storage.get_balance(HTR_TOKEN_UID).value == 0 + 30 + + # Address1 tries to send 0.80 HTR but it fails due to insufficient balance. + # This misleads developers into thinking the safety mechanism is working. 
+ with self.assertRaises(InsufficientBalance): + ctx = Context([], tx, self.address1, timestamp=0) + self.runner.call_public_method( + self.nc_target_id, + 'transfer_to', + ctx, + amount=80, + contract=self.nc_attacker_id, + method='nop', + ) + + assert self.target_storage.get_balance(HTR_TOKEN_UID).value == 10_150 - 30 + assert self.attacker_storage.get_balance(HTR_TOKEN_UID).value == 0 + 30 + + def test_attack_succeed(self) -> None: + tx = self.get_genesis_tx() + + # Attacker contract has a balance of 0.50 HTR in the target contract. + # It tries to extract more than 0.50 HTR and succeeds. + ctx = Context([], tx, self.address1, timestamp=0) + self.runner.call_public_method( + self.nc_attacker_id, + 'attack', + ctx, + ) + + assert self.target_storage.get_balance(HTR_TOKEN_UID).value == 10_150 - self.n_calls * 50 + assert self.attacker_storage.get_balance(HTR_TOKEN_UID).value == self.n_calls * 50 + + def test_attack_fail(self) -> None: + tx = self.get_genesis_tx() + + # Attacker contract has a balance of 0.50 HTR in the target contract. + # It tries to extract more than 0.50 HTR and fails. 
+ with self.assertRaises(InsufficientBalance): + ctx = Context([], tx, self.address1, timestamp=0) + self.runner.call_public_method( + self.nc_attacker_id, + 'attack_fail', + ctx, + ) + + assert self.target_storage.get_balance(HTR_TOKEN_UID).value == 10_150 + assert self.attacker_storage.get_balance(HTR_TOKEN_UID).value == 0 diff --git a/tests/nanocontracts/test_rng.py b/tests/nanocontracts/test_rng.py new file mode 100644 index 000000000..79dcd055c --- /dev/null +++ b/tests/nanocontracts/test_rng.py @@ -0,0 +1,453 @@ +from math import floor, sqrt + +import pytest + +from hathor.conf import HathorSettings +from hathor.nanocontracts import Blueprint, Context, public +from hathor.nanocontracts.catalog import NCBlueprintCatalog +from hathor.nanocontracts.exception import NCFail +from hathor.nanocontracts.rng import NanoRNG +from hathor.nanocontracts.types import ContractId +from hathor.transaction import Transaction +from tests.dag_builder.builder import TestDAGBuilder +from tests.simulation.base import SimulatorTestCase + +settings = HathorSettings() + + +class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @public + def nop(self, ctx: Context) -> None: + x = self.syscall.rng.random() + if x < 0.5: + raise NCFail('bad luck') + + +class AttackerBlueprint(Blueprint): + target: ContractId + + @public + def initialize(self, ctx: Context, target: ContractId) -> None: + self.target = target + + @public + def attack(self, ctx: Context) -> None: + self.syscall.rng.random = lambda: 0.75 # type: ignore[method-assign] + self.syscall.call_public_method(self.target, 'nop', actions=[]) + + +class NCConsensusTestCase(SimulatorTestCase): + __test__ = True + + def setUp(self): + super().setUp() + + self.myblueprint_id = b'x' * 32 + self.attacker_blueprint_id = b'y' * 32 + self.catalog = NCBlueprintCatalog({ + self.myblueprint_id: MyBlueprint, + self.attacker_blueprint_id: AttackerBlueprint, + }) + + self.manager = 
self.simulator.create_peer() + self.manager.tx_storage.nc_catalog = self.catalog + + self.wallet = self.manager.wallet + + def test_rng_consistency(self) -> None: + seed = self.rng.randbytes(32) + n = 100_000 + + rng1 = NanoRNG(seed=seed) + v1 = [rng1.randbits(32) for _ in range(n)] + for _ in range(10): + rng2 = NanoRNG(seed=seed) + v2 = [rng2.randbits(32) for _ in range(n)] + assert v1 == v2 + + def test_rng_override(self) -> None: + seed = b'0' * 32 + rng = NanoRNG(seed=seed) + + # + # Existing attribute on instance + # + + # protected by overridden __setattr__ + with pytest.raises(AttributeError, match='Cannot assign methods to this object.'): + rng._NanoRNG__seed = b'1' * 32 + + # protected by overridden __setattr__ + with pytest.raises(AttributeError, match='Cannot assign methods to this object.'): + setattr(rng, '_NanoRNG__seed', b'1' * 32) + + # it doesn't protect against this case + object.__setattr__(rng, '_NanoRNG__seed', b'changed') + assert getattr(rng, '_NanoRNG__seed') == b'changed' + + # + # New attribute on instance + # + + # protected by overridden NanoRNG.__setattr__ + with pytest.raises(AttributeError, match='Cannot assign methods to this object.'): + rng.new_attr = 123 + + # protected by overridden NanoRNG.__setattr__ + with pytest.raises(AttributeError, match='Cannot assign methods to this object.'): + setattr(rng, 'new_attr', 123) + + # protected by __slots__ + with pytest.raises(AttributeError, match="'NanoRNG' object has no attribute 'new_attr'"): + object.__setattr__(rng, 'new_attr', 123) + + # + # Existing method on instance + # + + # protected by overridden NanoRNG.__setattr__ + with pytest.raises(AttributeError, match='Cannot assign methods to this object.'): + rng.random = lambda self: 2 # type: ignore[method-assign, misc, assignment] + + # protected by overridden NanoRNG.__setattr__ + with pytest.raises(AttributeError, match='Cannot assign methods to this object.'): + setattr(rng, 'random', lambda self: 2) + + # protected by 
overridden NanoRNG.__setattr__ + with pytest.raises(AttributeError, match='Cannot assign methods to this object.'): + from types import MethodType + rng.random = MethodType(lambda self: 2, rng) # type: ignore[method-assign] + + # protected by __slots__ + with pytest.raises(AttributeError, match='\'NanoRNG\' object attribute \'random\' is read-only'): + object.__setattr__(rng, 'random', lambda self: 2) + + # + # Existing method on class + # + + # protected by overridden NoMethodOverrideMeta.__setattr__ + with pytest.raises(AttributeError, match='Cannot override method `random`'): + NanoRNG.random = lambda self: 2 # type: ignore[method-assign] + + # protected by overridden NoMethodOverrideMeta.__setattr__ + with pytest.raises(AttributeError, match='Cannot override method `random`'): + setattr(NanoRNG, 'random', lambda self: 2) + + # protected by Python itself + with pytest.raises(TypeError, match='can\'t apply this __setattr__ to NoMethodOverrideMeta object'): + object.__setattr__(NanoRNG, 'random', lambda self: 2) + + # + # Existing method on __class__ + # + + # protected by overridden NoMethodOverrideMeta.__setattr__ + with pytest.raises(AttributeError, match='Cannot override method `random`'): + rng.__class__.random = lambda self: 2 # type: ignore[method-assign] + + # protected by overridden NoMethodOverrideMeta.__setattr__ + with pytest.raises(AttributeError, match='Cannot override method `random`'): + setattr(rng.__class__, 'random', lambda self: 2) + + # protected by Python itself + with pytest.raises(TypeError, match='can\'t apply this __setattr__ to NoMethodOverrideMeta object'): + object.__setattr__(rng.__class__, 'random', lambda self: 2) + + # + # New attribute on class + # + + # protected by overridden NoMethodOverrideMeta.__setattr__ + with pytest.raises(AttributeError, match='Cannot override method `new_attr`'): + NanoRNG.new_attr = 123 + + # protected by overridden NoMethodOverrideMeta.__setattr__ + with pytest.raises(AttributeError, match='Cannot 
override method `new_attr`'): + setattr(NanoRNG, 'new_attr', 123) + + # protected by Python itself + with pytest.raises(TypeError, match='can\'t apply this __setattr__ to NoMethodOverrideMeta object'): + object.__setattr__(NanoRNG, 'new_attr', 123) + + assert rng.random() < 1 + + def test_rng_shell_class(self) -> None: + seed = b'0' * 32 + rng1 = NanoRNG.create_with_shell(seed=seed) + rng2 = NanoRNG.create_with_shell(seed=seed) + + assert rng1.__class__ != rng2.__class__ + + with pytest.raises(AttributeError, match='Cannot override method `random`'): + rng1.__class__.random = lambda self: 2 # type: ignore[method-assign] + + with pytest.raises(AttributeError, match='Cannot override method `random`'): + setattr(rng1.__class__, 'random', lambda self: 2) + + with pytest.raises(TypeError, match='can\'t apply this __setattr__ to NoMethodOverrideMeta object'): + object.__setattr__(rng1.__class__, 'random', lambda self: 2) + + def assertGoodnessOfFitTest(self, observed: list[int], expected: list[int]) -> None: + """Pearson chi-square goodness-of-fit test for uniform [0, 1)""" + assert len(observed) == len(expected) + size = len(expected) + N = sum(expected) + assert N == sum(observed) + + # chi2 = sum((observed[k] - expected[k])**2 / expected[k] for k in range(size)) + # After some algebra, the equation above turns out to be: + # chi2 = sum(observed[k]**2 / expected[k] for k in range(size)) - N + df = 0 + chi2 = 0. + for k in range(size): + if expected[k] == 0: + assert observed[k] == 0 + else: + chi2 += observed[k]**2 / expected[k] + df += 1 + chi2 -= N + df -= 1 + + # assumption so we can approximate the chi2 distribution by a normal distribution + # with mean df and variance 2*df. + assert df >= 30 + + z_score = (chi2 - df) / sqrt(2 * df) + L = 3 + + # The probability of -L < z_score < L is: phi(L) - phi(-L) + # where phi(x) is the cdf of the standard normal distribution + # For L = 3, it is 99.73%. + # In other words, this assert should pass 99.73% of the runs. 
+ assert -L < z_score < L + + def test_rng_randbits(self) -> None: + seed = self.rng.randbytes(32) + rng = NanoRNG(seed=seed) + + size = 4096 # keep it a power of 2 + expected = 100 + frequencies = [0] * size + for _ in range(expected * size): + idx = rng.randbits(32) % size + frequencies[idx] += 1 + + self.assertGoodnessOfFitTest(frequencies, [expected] * size) + + def test_rng_randbelow(self) -> None: + seed = self.rng.randbytes(32) + rng = NanoRNG(seed=seed) + + size = 10_000 + expected = 100 + frequencies = [0] * size + for _ in range(expected * size): + idx = rng.randbelow(size) + frequencies[idx] += 1 + + self.assertGoodnessOfFitTest(frequencies, [expected] * size) + + def test_rng_randint(self) -> None: + seed = self.rng.randbytes(32) + rng = NanoRNG(seed=seed) + + size = 10_000 + expected = 100 + frequencies = [0] * size + + a = 150_000 + b = a + size - 1 + for _ in range(expected * size): + idx = rng.randint(a, b) - a + frequencies[idx] += 1 + + self.assertGoodnessOfFitTest(frequencies, [expected] * size) + + def test_rng_choice(self) -> None: + seed = self.rng.randbytes(32) + rng = NanoRNG(seed=seed) + + size = 10_000 + expected = 100 + frequencies = [0] * size + + v = list(range(size)) + for _ in range(expected * size): + idx = rng.choice(v) + frequencies[idx] += 1 + + self.assertGoodnessOfFitTest(frequencies, [expected] * size) + + def test_rng_randrange_small(self) -> None: + seed = self.rng.randbytes(32) + rng = NanoRNG(seed=seed) + + size = 10_000 + expected_per_bin = 500 + frequencies = [0] * size + + start = 15 + stop = size + step = 7 + + valid = set(range(start, stop, step)) + expected = [expected_per_bin if idx in valid else 0 for idx in range(size)] + + for _ in range(expected_per_bin * len(valid)): + idx = rng.randrange(start, stop, step) + assert idx in valid + frequencies[idx] += 1 + + self.assertGoodnessOfFitTest(frequencies, expected) + + def test_rng_randrange_large(self) -> None: + seed = self.rng.randbytes(32) + rng = NanoRNG(seed=seed) 
+ + size = 1007 + expected = 1000 + frequencies = [0] * size + + start = 15_000_000 + stop = 20_000_000_000 + step = (stop - start + size - 1) // size + + for _ in range(expected * size): + x = rng.randrange(start, stop, step) + assert (x - start) % step == 0 + idx = (x - start) // step + frequencies[idx] += 1 + + self.assertGoodnessOfFitTest(frequencies, [expected] * size) + + def test_rng_random(self) -> None: + seed = self.rng.randbytes(32) + rng = NanoRNG(seed=seed) + + size = 200 + expected = 1000 + frequencies = [0] * size + for _ in range(expected * size): + x = rng.random() + assert 0 <= x < 1 + idx = floor(size * x) + frequencies[idx] += 1 + + self.assertGoodnessOfFitTest(frequencies, [expected] * size) + + def test_simple_rng(self) -> None: + dag_builder = TestDAGBuilder.from_manager(self.manager) + + n = 250 + nc_calls_parts = [] + for i in range(2, n + 2): + nc_calls_parts.append(f''' + nc{i}.nc_id = nc1 + nc{i}.nc_method = nop() + nc{i} --> nc{i-1} + ''') + nc_calls = ''.join(nc_calls_parts) + + artifacts = dag_builder.build_from_str(f''' + blockchain genesis b[1..33] + b30 < dummy + + nc1.nc_id = "{self.myblueprint_id.hex()}" + nc1.nc_method = initialize() + + {nc_calls} + + nc{n+1} <-- b32 + ''') + + for node, vertex in artifacts.list: + assert self.manager.on_new_tx(vertex) + + nc1, = artifacts.get_typed_vertices(['nc1'], Transaction) + assert nc1.is_nano_contract() + assert nc1.get_metadata().voided_by is None + + names = [f'nc{i}' for i in range(2, n + 2)] + vertices = artifacts.get_typed_vertices(names, Transaction) + + success = 0 + fail = 0 + for v in vertices: + assert v.is_nano_contract() + if v.get_metadata().voided_by is None: + success += 1 + else: + fail += 1 + self.assertEqual(n, fail + success) + + p = 0.5 + ratio = success / n + + # success ~ Binomial(n=250, p=0.5) + # For n large, Binomial(n, p) ~ N(n*p, n*p*(1-p)) + # So, ratio ~ N(p, p*(1-p)/n) + + z_score = (ratio - p) / (p * (1 - p) / n)**0.5 + L = 3 + + # The probability of -L < 
z_score < L is: phi(L) - phi(-L) + # where phi(x) is the cdf of the standard normal distribution + # For L = 3, it is 99.73%. + # In other words, this assert should pass 99.73% of the runs. + assert -L < z_score < L + + def test_attack(self) -> None: + dag_builder = TestDAGBuilder.from_manager(self.manager) + + n = 250 + nc_calls_parts = [] + for i in range(3, n + 3): + nc_calls_parts.append(f''' + nc{i}.nc_id = nc2 + nc{i}.nc_method = attack() + nc{i} --> nc{i-1} + ''') + nc_calls = ''.join(nc_calls_parts) + + artifacts = dag_builder.build_from_str(f''' + blockchain genesis b[1..33] + b30 < dummy + + nc1.nc_id = "{self.myblueprint_id.hex()}" + nc1.nc_method = initialize() + + nc2.nc_id = "{self.attacker_blueprint_id.hex()}" + nc2.nc_method = initialize(`nc1`) + nc2 --> nc1 + + {nc_calls} + + nc{n+2} <-- b32 + ''') + + for node, vertex in artifacts.list: + assert self.manager.on_new_tx(vertex) + + nc1, = artifacts.get_typed_vertices(['nc1'], Transaction) + assert nc1.is_nano_contract() + assert nc1.get_metadata().voided_by is None + + names = [f'nc{i}' for i in range(3, n + 3)] + vertices = artifacts.get_typed_vertices(names, Transaction) + + success = 0 + fail = 0 + for v in vertices: + assert v.is_nano_contract() + assert v.get_metadata().nc_execution is not None + if v.get_metadata().voided_by is None: + success += 1 + else: + fail += 1 + self.assertEqual(0, success) + self.assertEqual(n, fail) diff --git a/tests/nanocontracts/test_seqnum.py b/tests/nanocontracts/test_seqnum.py new file mode 100644 index 000000000..6d3a3b11c --- /dev/null +++ b/tests/nanocontracts/test_seqnum.py @@ -0,0 +1,490 @@ +from hathor.nanocontracts import NC_EXECUTION_FAIL_ID, Blueprint, Context, public +from hathor.nanocontracts.exception import NCFail +from hathor.transaction import Block, Transaction +from hathor.transaction.nc_execution_state import NCExecutionState +from tests.dag_builder.builder import TestDAGBuilder +from tests.nanocontracts.blueprints.unittest import 
BlueprintTestCase +from tests.nanocontracts.utils import assert_nc_failure_reason + + +class MyBlueprint1(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @public + def nop(self, ctx: Context) -> None: + pass + + @public + def fail(self, ctx: Context) -> None: + raise NCFail('oops') + + +class NCBlueprintTestCase(BlueprintTestCase): + def setUp(self): + super().setUp() + self.blueprint1_id = self._register_blueprint_class(MyBlueprint1) + + def test_seqnum_fail_after_success(self) -> None: + """tx2 will successfully execute, so tx3 will fail because it has the same seqnum.""" + dag_builder = TestDAGBuilder.from_manager(self.manager) + artifacts = dag_builder.build_from_str(f''' + blockchain genesis b[1..34] + b30 < dummy + + nc1.nc_id = "{self.blueprint1_id.hex()}" + nc1.nc_method = initialize() + + tx2.nc_id = nc1 + tx2.nc_method = nop() + tx2.nc_address = wallet1 + tx2.nc_seqnum = 0 + + tx3.nc_id = nc1 + tx3.nc_method = nop() + tx3.nc_address = wallet1 + tx3.nc_seqnum = 0 + tx3 --> tx2 + + nc1 <-- b31 + tx2 <-- b31 + tx3 <-- b32 + ''') + + artifacts.propagate_with(self.manager) + + nc1, tx2, tx3 = artifacts.get_typed_vertices(['nc1', 'tx2', 'tx3'], Transaction) + b32 = artifacts.get_typed_vertex('b32', Block) + + assert nc1.get_metadata().voided_by is None + assert tx2.get_metadata().voided_by is None + assert tx3.get_metadata().voided_by == {tx3.hash, NC_EXECUTION_FAIL_ID} + assert nc1.get_metadata().nc_execution is NCExecutionState.SUCCESS + assert tx2.get_metadata().nc_execution is NCExecutionState.SUCCESS + assert tx3.get_metadata().nc_execution is NCExecutionState.FAILURE + + tx2_nano_header = tx2.get_nano_header() + tx3_nano_header = tx3.get_nano_header() + + assert tx2_nano_header.nc_address == tx3_nano_header.nc_address + assert tx2_nano_header.nc_seqnum == tx3_nano_header.nc_seqnum + + assert_nc_failure_reason( + manager=self.manager, + tx_id=tx3.hash, + block_id=b32.hash, + reason='NCFail: invalid seqnum (diff=0)' + ) + + 
def test_seqnum_fail_after_fail(self) -> None: + """tx2 will fail execution but it should increase the seqnum anyways. + So tx3 will fail because it has the same seqnum.""" + dag_builder = TestDAGBuilder.from_manager(self.manager) + artifacts = dag_builder.build_from_str(f''' + blockchain genesis b[1..34] + b30 < dummy + + nc1.nc_id = "{self.blueprint1_id.hex()}" + nc1.nc_method = initialize() + + tx2.nc_id = nc1 + tx2.nc_method = fail() + tx2.nc_address = wallet1 + tx2.nc_seqnum = 0 + + tx3.nc_id = nc1 + tx3.nc_method = nop() + tx3.nc_address = wallet1 + tx3.nc_seqnum = 0 + tx3 --> tx2 + + nc1 <-- b31 + tx2 <-- b31 + tx3 <-- b32 + ''') + + artifacts.propagate_with(self.manager) + + nc1, tx2, tx3 = artifacts.get_typed_vertices(['nc1', 'tx2', 'tx3'], Transaction) + b31, b32 = artifacts.get_typed_vertices(['b31', 'b32'], Block) + + assert nc1.get_metadata().voided_by is None + assert tx2.get_metadata().voided_by == {tx2.hash, NC_EXECUTION_FAIL_ID} + assert tx3.get_metadata().voided_by == {tx3.hash, NC_EXECUTION_FAIL_ID} + assert nc1.get_metadata().nc_execution is NCExecutionState.SUCCESS + assert tx2.get_metadata().nc_execution is NCExecutionState.FAILURE + assert tx3.get_metadata().nc_execution is NCExecutionState.FAILURE + + assert_nc_failure_reason( + manager=self.manager, + tx_id=tx2.hash, + block_id=b31.hash, + reason='NCFail: oops' + ) + + tx2_nano_header = tx2.get_nano_header() + tx3_nano_header = tx3.get_nano_header() + + assert tx2_nano_header.nc_address == tx3_nano_header.nc_address + assert tx2_nano_header.nc_seqnum == tx3_nano_header.nc_seqnum + + assert_nc_failure_reason( + manager=self.manager, + tx_id=tx3.hash, + block_id=b32.hash, + reason='NCFail: invalid seqnum (diff=0)' + ) + + def test_seqnum_fail_after_skip(self) -> None: + """tx2 will skip execution but it should increase the seqnum anyways. 
+ So tx3 will fail because it has the same seqnum.""" + dag_builder = TestDAGBuilder.from_manager(self.manager) + artifacts = dag_builder.build_from_str(f''' + blockchain genesis b[1..34] + b30 < dummy + + nc1.nc_id = "{self.blueprint1_id.hex()}" + nc1.nc_method = initialize() + + tx1.nc_id = nc1 + tx1.nc_method = fail() + tx1.out[0] <<< tx2 + + tx2.nc_id = nc1 + tx2.nc_method = nop() + tx2.nc_address = wallet1 + tx2.nc_seqnum = 0 + + tx3.nc_id = nc1 + tx3.nc_method = nop() + tx3.nc_address = wallet1 + tx3.nc_seqnum = 0 + tx3 --> tx2 + + nc1 <-- b31 + tx2 <-- b31 + tx3 <-- b32 + ''') + + artifacts.propagate_with(self.manager) + + nc1, tx1, tx2, tx3 = artifacts.get_typed_vertices(['nc1', 'tx1', 'tx2', 'tx3'], Transaction) + b31, b32 = artifacts.get_typed_vertices(['b31', 'b32'], Block) + + assert nc1.get_metadata().voided_by is None + assert tx1.get_metadata().voided_by == {tx1.hash, NC_EXECUTION_FAIL_ID} + assert tx2.get_metadata().voided_by == {tx1.hash} + assert tx3.get_metadata().voided_by == {tx3.hash, NC_EXECUTION_FAIL_ID} + assert nc1.get_metadata().nc_execution is NCExecutionState.SUCCESS + assert tx1.get_metadata().nc_execution is NCExecutionState.FAILURE + assert tx2.get_metadata().nc_execution is NCExecutionState.SKIPPED + assert tx3.get_metadata().nc_execution is NCExecutionState.FAILURE + + assert_nc_failure_reason( + manager=self.manager, + tx_id=tx1.hash, + block_id=b31.hash, + reason='NCFail: oops' + ) + + tx2_nano_header = tx2.get_nano_header() + tx3_nano_header = tx3.get_nano_header() + + assert tx2_nano_header.nc_address == tx3_nano_header.nc_address + assert tx2_nano_header.nc_seqnum == tx3_nano_header.nc_seqnum + + assert_nc_failure_reason( + manager=self.manager, + tx_id=tx3.hash, + block_id=b32.hash, + reason='NCFail: invalid seqnum (diff=0)' + ) + + def test_seqnum_fail_max_jump(self) -> None: + dag_builder = TestDAGBuilder.from_manager(self.manager) + artifacts = dag_builder.build_from_str(f''' + blockchain genesis b[1..34] + b30 < dummy + + 
nc1.nc_id = "{self.blueprint1_id.hex()}" + nc1.nc_method = initialize() + nc1.nc_address = wallet1 + nc1.nc_seqnum = 0 + + tx2.nc_id = nc1 + tx2.nc_method = nop() + tx2.nc_address = wallet1 + tx2.nc_seqnum = 11 + + nc1 <-- tx2 <-- b31 + ''') + + artifacts.propagate_with(self.manager) + + nc1, tx2 = artifacts.get_typed_vertices(['nc1', 'tx2'], Transaction) + b31 = artifacts.get_typed_vertex('b31', Block) + + assert nc1.get_metadata().voided_by is None + assert nc1.get_metadata().nc_execution is NCExecutionState.SUCCESS + + assert tx2.get_metadata().voided_by == {tx2.hash, NC_EXECUTION_FAIL_ID} + assert tx2.get_metadata().nc_execution is NCExecutionState.FAILURE + assert_nc_failure_reason( + manager=self.manager, + tx_id=tx2.hash, + block_id=b31.hash, + reason='NCFail: invalid seqnum (diff=11)' + ) + + nc1_nano_header = nc1.get_nano_header() + tx2_nano_header = tx2.get_nano_header() + + assert nc1_nano_header.nc_address == tx2_nano_header.nc_address + + def test_invalid_block(self) -> None: + dag_builder = TestDAGBuilder.from_manager(self.manager) + artifacts = dag_builder.build_from_str(f''' + blockchain genesis b[1..34] + b30 < dummy + + nc1.nc_id = "{self.blueprint1_id.hex()}" + nc1.nc_method = initialize() + + tx2.nc_id = nc1 + tx2.nc_method = nop() + tx2.nc_address = wallet1 + tx2.nc_seqnum = 0 + + tx3.nc_id = nc1 + tx3.nc_method = nop() + tx3.nc_address = wallet1 + tx3.nc_seqnum = 0 + tx2.out[0] <<< tx3 + + tx4.nc_id = nc1 + tx4.nc_method = nop() + tx4.nc_address = wallet1 + tx4.nc_seqnum = 1 + tx4 --> tx3 + + nc1 <-- b31 + tx4 <-- b32 + ''') + + artifacts.propagate_with(self.manager) + + nc1, tx2, tx3, tx4 = artifacts.get_typed_vertices(['nc1', 'tx2', 'tx3', 'tx4'], Transaction) + b32 = artifacts.get_typed_vertex('b32', Block) + + assert nc1.get_metadata().voided_by is None + assert nc1.get_metadata().nc_execution is NCExecutionState.SUCCESS + + assert tx2.get_metadata().voided_by is None + assert tx2.get_metadata().nc_execution is NCExecutionState.SUCCESS + + 
assert tx3.get_metadata().voided_by == {tx3.hash, NC_EXECUTION_FAIL_ID} + assert tx3.get_metadata().nc_execution is NCExecutionState.FAILURE + + assert tx4.get_metadata().voided_by is None + assert tx4.get_metadata().nc_execution is NCExecutionState.SUCCESS + + assert b32.get_metadata().voided_by is None + + tx2_nano_header = tx2.get_nano_header() + tx3_nano_header = tx3.get_nano_header() + + assert tx2_nano_header.nc_address == tx3_nano_header.nc_address + assert tx2_nano_header.nc_seqnum == tx3_nano_header.nc_seqnum + + assert_nc_failure_reason( + manager=self.manager, + tx_id=tx3.hash, + block_id=b32.hash, + reason='NCFail: invalid seqnum (diff=0)' + ) + + def test_circular_dependency(self) -> None: + """ + nc3 has the same address as nc1, and it uses nc2 which spends from nc1, so there's an indirect dependency. + However, nc3.seqnum < nc1.seqnum. + """ + dag_builder = TestDAGBuilder.from_manager(self.manager) + artifacts = dag_builder.build_from_str(f''' + blockchain genesis b[1..11] + b10 < dummy + + nc1.nc_id = "{self.blueprint1_id.hex()}" + nc1.nc_method = initialize() + nc1.nc_address = wallet1 + nc1.nc_seqnum = 2 + + nc2.nc_id = "{self.blueprint1_id.hex()}" + nc2.nc_method = initialize() + nc1.out[0] <<< nc2 + + nc3.nc_id = nc2 + nc3.nc_method = nop() + nc3.nc_address = wallet1 + nc3.nc_seqnum = 1 + + nc2 <-- b11 + nc3 <-- b11 + ''') + + artifacts.propagate_with(self.manager) + + nc1, nc2, nc3 = artifacts.get_typed_vertices(['nc1', 'nc2', 'nc3'], Transaction) + b11 = artifacts.get_typed_vertex('b11', Block) + + nc1_nano_header = nc1.get_nano_header() + nc2_nano_header = nc2.get_nano_header() + nc3_nano_header = nc3.get_nano_header() + assert nc1_nano_header.nc_address != nc2_nano_header.nc_address + assert nc1_nano_header.nc_address == nc3_nano_header.nc_address + assert nc1_nano_header.nc_seqnum > nc3_nano_header.nc_seqnum + + assert nc1.get_metadata().nc_execution is NCExecutionState.SUCCESS + assert nc1.get_metadata().voided_by is None + + assert 
nc2.get_metadata().nc_execution is NCExecutionState.SUCCESS + assert nc2.get_metadata().voided_by is None + + assert nc3.get_metadata().nc_execution is NCExecutionState.FAILURE + assert nc3.get_metadata().voided_by == {nc3.hash, NC_EXECUTION_FAIL_ID} + assert_nc_failure_reason( + manager=self.manager, + tx_id=nc3.hash, + block_id=b11.hash, + reason='NCFail: invalid seqnum (diff=-1)' + ) + + def test_timestamp_rule(self) -> None: + dag_builder = TestDAGBuilder.from_manager(self.manager) + artifacts = dag_builder.build_from_str(f''' + blockchain genesis b[1..12] + b10 < dummy + + nc0.nc_id = "{self.blueprint1_id.hex()}" + nc0.nc_method = initialize() + + nc1.nc_id = nc0 + nc1.nc_method = nop() + nc1.nc_address = wallet1 + nc1.nc_seqnum = 2 + + nc2.nc_id = nc0 + nc2.nc_method = nop() + nc2.nc_address = wallet1 + nc2.nc_seqnum = 1 + + nc1 < nc2 + nc0 <-- b11 + nc1 <-- b12 + nc2 <-- b12 + ''') + + artifacts.propagate_with(self.manager) + b12 = artifacts.get_typed_vertex('b12', Block) + nc0, nc1, nc2 = artifacts.get_typed_vertices(['nc0', 'nc1', 'nc2'], Transaction) + + nc1_nano_header = nc1.get_nano_header() + nc2_nano_header = nc2.get_nano_header() + assert nc1_nano_header.nc_address == nc2_nano_header.nc_address + assert nc1_nano_header.nc_seqnum > nc2_nano_header.nc_seqnum + assert nc1.timestamp < nc2.timestamp + + assert nc0.get_metadata().nc_execution is NCExecutionState.SUCCESS + assert nc0.get_metadata().voided_by is None + + # The execution order of nc1 and nc2 is random because even though nc1.seqnum > nc2.seqnum, the timestamp + # rule makes this order not guaranteed. + # - When we execute nc1 before nc2, nc1 succeeds and nc2 fails. + # - When we execute nc1 after nc2, both succeed. 
+ + assert nc1.get_metadata().nc_execution is NCExecutionState.SUCCESS + assert nc1.get_metadata().voided_by is None + + if nc2.get_metadata().nc_execution is NCExecutionState.FAILURE: + assert nc2.get_metadata().voided_by == {nc2.hash, NC_EXECUTION_FAIL_ID} + assert_nc_failure_reason( + manager=self.manager, + tx_id=nc2.hash, + block_id=b12.hash, + reason='NCFail: invalid seqnum (diff=-1)' + ) + + def test_multiple_txs_same_seqnum(self) -> None: + dag_builder = TestDAGBuilder.from_manager(self.manager) + artifacts = dag_builder.build_from_str(f''' + blockchain genesis b[1..12] + b10 < dummy + + nc0.nc_id = "{self.blueprint1_id.hex()}" + nc0.nc_method = initialize() + + nc1.nc_id = nc0 + nc1.nc_method = nop() + nc1.nc_address = wallet1 + nc1.nc_seqnum = 1 + + nc2.nc_id = nc0 + nc2.nc_method = nop() + nc2.nc_address = wallet1 + nc2.nc_seqnum = 1 + + nc3.nc_id = nc0 + nc3.nc_method = nop() + nc3.nc_address = wallet1 + nc3.nc_seqnum = 2 + + nc4.nc_id = nc0 + nc4.nc_method = nop() + nc4.nc_address = wallet1 + nc4.nc_seqnum = 2 + + nc5.nc_id = nc0 + nc5.nc_method = nop() + nc5.nc_address = wallet1 + nc5.nc_seqnum = 3 + + nc6.nc_id = nc0 + nc6.nc_method = nop() + nc6.nc_address = wallet1 + nc6.nc_seqnum = 3 + + nc0 <-- b11 + nc1 <-- nc2 <-- nc3 <-- nc4 <-- nc5 <-- nc6 <-- b12 + ''') + + artifacts.propagate_with(self.manager) + nc0, nc1, nc2, nc3, nc4, nc5, nc6 = artifacts.get_typed_vertices( + ['nc0', 'nc1', 'nc2', 'nc3', 'nc4', 'nc5', 'nc6'], + Transaction, + ) + + nc1_nano_header = nc1.get_nano_header() + nc2_nano_header = nc2.get_nano_header() + nc3_nano_header = nc3.get_nano_header() + nc4_nano_header = nc4.get_nano_header() + nc5_nano_header = nc5.get_nano_header() + nc6_nano_header = nc6.get_nano_header() + assert len({ + nc1_nano_header.nc_address, + nc2_nano_header.nc_address, + nc3_nano_header.nc_address, + nc4_nano_header.nc_address, + nc5_nano_header.nc_address, + nc6_nano_header.nc_address, + }) == 1 + assert nc1_nano_header.nc_seqnum == 
nc2_nano_header.nc_seqnum + assert nc3_nano_header.nc_seqnum == nc4_nano_header.nc_seqnum + assert nc5_nano_header.nc_seqnum == nc6_nano_header.nc_seqnum + + assert nc0.get_metadata().nc_execution is NCExecutionState.SUCCESS + assert nc0.get_metadata().voided_by is None + + expected_states = {NCExecutionState.SUCCESS, NCExecutionState.FAILURE} + assert {nc1.get_metadata().nc_execution, nc2.get_metadata().nc_execution} == expected_states + assert {nc3.get_metadata().nc_execution, nc4.get_metadata().nc_execution} == expected_states + assert {nc5.get_metadata().nc_execution, nc6.get_metadata().nc_execution} == expected_states diff --git a/tests/nanocontracts/test_serializer.py b/tests/nanocontracts/test_serializer.py new file mode 100644 index 000000000..35975f3c9 --- /dev/null +++ b/tests/nanocontracts/test_serializer.py @@ -0,0 +1,118 @@ +from typing import Optional, TypeVar + +from hathor.nanocontracts.nc_types import NCType, make_nc_type_for_arg_type as make_nc_type +from hathor.nanocontracts.types import SignedData +from tests import unittest + +T = TypeVar('T') + + +class NCSerializerTestCase(unittest.TestCase): + def _run_test(self, type_: type[T], result: T) -> None: + nc_type = make_nc_type(type_) + result_bytes = nc_type.to_bytes(result) + result2: T = nc_type.from_bytes(result_bytes) + self.assertEqual(result, result2) + + def _run_test_signed(self, type_: type[T], result: T) -> None: + from hathor.wallet import KeyPair + + nc_type = make_nc_type(type_) + result_bytes = nc_type.to_bytes(result) + result2: T = nc_type.from_bytes(result_bytes) + self.assertEqual(result, result2) + + # Oracle's private key. 
+ key = KeyPair.create(b'my-key') + script_input = key.p2pkh_create_input_data(b'my-key', result_bytes) + # XXX: ignoring valid-type because type_ can and must be used with SignedData + signed_result: SignedData[T] = SignedData[type_](result, script_input) # type: ignore[valid-type] + signeddata_nc_type = make_nc_type(SignedData[type_]) # type: ignore[valid-type] + serialized_bytes = signeddata_nc_type.to_bytes(signed_result) + signed_result2: SignedData[T] = signeddata_nc_type.from_bytes(serialized_bytes) + self.assertEqual(signed_result.data, signed_result2.data) + self.assertEqual(signed_result.script_input, signed_result2.script_input) + + def _run_test_nc_type(self, nc_type: NCType[T], result: T) -> None: + result_bytes = nc_type.to_bytes(result) + result2: T = nc_type.from_bytes(result_bytes) + self.assertEqual(result, result2) + + def test_signed_bytes(self): + self._run_test_signed(bytes, b'1x1') + + def test_signed_str(self): + self._run_test_signed(str, '1x1') + + def test_signed_bool(self): + self._run_test_signed(bool, True) + + def test_signed_invalid_type(self): + # XXX: list must be given a type argument, otherwise we cannot choose the inner parser, which is needed + # even if the list is empty, in this test we're checking that it will error + with self.assertRaises(TypeError): + self._run_test_signed(list, []) + + def test_invalid_bool(self): + from hathor.nanocontracts.nc_types import BoolNCType + bool_nc_type = BoolNCType() + with self.assertRaises(ValueError): + bool_nc_type.from_bytes(b'\x02') + + def test_str_empty(self): + self._run_test(str, '') + + def test_str_valid(self): + self._run_test(str, 'hathor') + + def test_str_accents(self): + self._run_test(str, 'áéíóúçãõ') + + def test_bytes_empty(self): + self._run_test(bytes, b'') + + def test_bytes_valid(self): + self._run_test(bytes, b'\x01\x02') + + def test_int_negative(self): + self._run_test(int, -100) + + def test_int_zero(self): + self._run_test(int, 0) + + def 
test_int_positive(self): + self._run_test(int, 100) + + def test_int_too_big(self): + from hathor.nanocontracts.nc_types import Int32NCType + with self.assertRaises(ValueError): + # this fails because Int32NCType's range is [-2**31, 2**31) + self._run_test_nc_type(Int32NCType(), 2**31) + # but this doesn't fail because int maps to VarInt32NCType + self._run_test(int, 2**31) + with self.assertRaises(ValueError): + # which has a larger, but still limited range, so this will fail: + self._run_test(int, 2**223) + + def test_optional_str_none(self): + self._run_test(Optional[str], None) + self._run_test(str | None, None) + + def test_optional_str_empty(self): + self._run_test(Optional[str], '') + self._run_test(str | None, '') + + def test_optional_str(self): + self._run_test(Optional[str], 'hathor') + self._run_test(str | None, 'hathor') + + def test_tuple(self): + self._run_test(tuple[int, str, bytes], (1, 'a', b'b')) + + def test_tuple_optional_str(self): + type_ = tuple[int, Optional[str]] + self._run_test(type_, (1, 'a')) + + def test_tuple_optional_none(self): + type_ = tuple[int, Optional[str]] + self._run_test(type_, (1, None)) diff --git a/tests/nanocontracts/test_sorter.py b/tests/nanocontracts/test_sorter.py new file mode 100644 index 000000000..c3b890cab --- /dev/null +++ b/tests/nanocontracts/test_sorter.py @@ -0,0 +1,202 @@ +from hathor.nanocontracts.sorter.random_sorter import NCBlockSorter +from hathor.transaction import Transaction +from hathor.types import VertexId +from tests import unittest +from tests.dag_builder.builder import TestDAGBuilder + + +class NCBlockSorterTestCase(unittest.TestCase): + def setUp(self) -> None: + super().setUp() + + self.nodes = {} + for i in range(100): + self.nodes[i] = VertexId(f'{i}'.encode('ascii')) + + self.nc_nodes = {} + for i in range(99): + self.nc_nodes[i] = VertexId(f'nc-{i}'.encode('ascii')) + + def test_all_independent(self) -> None: + sorter = NCBlockSorter(set(self.nodes.values())) + for node in 
self.nodes.values(): + sorter.get_node(node) + + seed = self.rng.randbytes(32) + order = sorter.copy().generate_random_topological_order(seed) + self.assertEqual(len(self.nodes), len(set(order))) + + order2 = sorter.copy().generate_random_topological_order(seed) + self.assertEqual(order, order2) + + # There are n! permutations. + # Therefore, the probability of getting the same order is 1/100!, which is around 1e-158. + for _ in range(100): + seed2 = self.rng.randbytes(32) + order2 = sorter.copy().generate_random_topological_order(seed2) + self.assertNotEqual(order, order2) + + def test_single_one_step_dependencies(self) -> None: + sorter = NCBlockSorter(set(self.nc_nodes.values())) + + # Generate the following graph: + # 0 -> NC0 -> 1 -> NC1 -> 2 -> NC2 -> 3 -> ... + for i in range(len(self.nodes) - 1): + sorter.add_edge(self.nodes[i], self.nc_nodes[i]) + sorter.add_edge(self.nc_nodes[i], self.nodes[i + 1]) + + seed = self.rng.randbytes(32) + order = sorter.copy().generate_random_topological_order(seed) + self.assertEqual(set(self.nc_nodes.values()), set(order)) + + # There's only one valid order. So it must return the same order for any seed. + for _ in range(100): + seed2 = self.rng.randbytes(32) + order2 = sorter.copy().generate_random_topological_order(seed2) + self.assertEqual(order, order2) + + def test_single_long_dependencies(self) -> None: + sorter = NCBlockSorter(set(self.nc_nodes.values())) + + # Generate the following graph: + # 0 -> NC0 -> 1 -> 2 -> 3 -> 4 -> NC4 -> 5 -> 6 -> 7 -> 8 -> NC8 -> ... + for i in range(len(self.nodes) - 1): + if i % 4 == 0: + sorter.add_edge(self.nodes[i], self.nc_nodes[i]) + sorter.add_edge(self.nc_nodes[i], self.nodes[i + 1]) + else: + sorter.add_edge(self.nodes[i], self.nodes[i + 1]) + + seed = self.rng.randbytes(32) + order = sorter.copy().generate_random_topological_order(seed) + self.assertEqual(set(x for i, x in self.nc_nodes.items() if i % 4 == 0), set(order)) + + # There's only one valid order. 
So it must return the same order for any seed. + for _ in range(100): + seed2 = self.rng.randbytes(32) + order2 = sorter.copy().generate_random_topological_order(seed2) + self.assertEqual(order, order2) + + def test_linear_multiple_dependencies(self) -> None: + sorter = NCBlockSorter(set(self.nc_nodes.values())) + sorter.add_edge(self.nc_nodes[0], self.nodes[1]) + sorter.add_edge(self.nodes[1], self.nodes[2]) + sorter.add_edge(self.nodes[2], self.nodes[3]) + sorter.add_edge(self.nodes[3], self.nodes[4]) + sorter.add_edge(self.nodes[4], self.nc_nodes[5]) + + seed = self.rng.randbytes(32) + order = sorter.copy().generate_random_topological_order(seed) + self.assertEqual(order, [ + self.nc_nodes[5], + self.nc_nodes[0], + ]) + + def test_grid_multiple_dependencies(self) -> None: + sorter = NCBlockSorter(set(self.nc_nodes.values())) + + idx = 0 + n_layers = 10 + n_per_layer = 8 + layers: list[list[VertexId]] = [] + + selected_nc_nodes = {1, 57, 75} + + for _ in range(n_layers): + current = [] + for j in range(n_per_layer): + if idx in selected_nc_nodes: + vertex_id = self.nc_nodes[idx] + else: + vertex_id = self.nodes[idx] + current.append(vertex_id) + idx += 1 + + _ = sorter.get_node(vertex_id) + if layers: + previous = layers[-1] + if j > 0: + sorter.add_edge(previous[j - 1], vertex_id) + sorter.add_edge(previous[j], vertex_id) + layers.append(current) + + seed = self.rng.randbytes(32) + order = sorter.copy().generate_random_topological_order(seed) + self.assertEqual(order, [ + self.nc_nodes[75], + self.nc_nodes[57], + self.nc_nodes[1], + ]) + + # There's only one valid order. So it must return the same order for any seed. 
+ for _ in range(100): + seed2 = self.rng.randbytes(32) + order2 = sorter.copy().generate_random_topological_order(seed2) + self.assertEqual(order, order2) + + def test_dag_dependencies(self) -> None: + builder = self.get_builder() + builder.enable_nc_anti_mev() + manager = self.create_peer_from_builder(builder) + dag_builder = TestDAGBuilder.from_manager(manager) + + private_key = unittest.OCB_TEST_PRIVKEY.hex() + password = unittest.OCB_TEST_PASSWORD.hex() + artifacts = dag_builder.build_from_str(f""" + blockchain genesis b[1..32] + b30 < dummy + + nc1.nc_id = ocb1 + nc1.nc_method = initialize() + + nc2.nc_id = nc1 + nc2.nc_method = nop() + + nc3.nc_id = nc1 + nc3.nc_method = nop() + + nc4.nc_id = nc1 + nc4.nc_method = nop() + + nc5.nc_id = nc1 + nc5.nc_method = nop() + + nc6.nc_id = nc1 + nc6.nc_method = nop() + + b31 --> ocb1 # OCB must be confirmed before being used to create a contract + b31 < nc1 + nc1 <-- nc2 <-- b32 + + ocb1.ocb_private_key = "{private_key}" + ocb1.ocb_password = "{password}" + ocb1.ocb_code = ``` + from hathor.nanocontracts import Blueprint + from hathor.nanocontracts.context import Context + from hathor.nanocontracts.types import public + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @public + def nop(self, ctx: Context) -> None: + pass + __blueprint__ = MyBlueprint + ``` + """) + + artifacts.propagate_with(manager) + + ocb1, nc1 = artifacts.get_typed_vertices(['ocb1', 'nc1'], Transaction) + + nc_others = [] + for i in range(2, 7): + nc_others.append(artifacts.get_typed_vertex(f'nc{i}', Transaction)) + + assert ocb1.get_metadata().voided_by is None + assert nc1.get_metadata().voided_by is None + + for tx in nc_others: + # TODO Assert the execution order. 
+ assert tx.get_metadata().voided_by is None diff --git a/tests/nanocontracts/test_storage.py b/tests/nanocontracts/test_storage.py new file mode 100644 index 000000000..6b3eddd97 --- /dev/null +++ b/tests/nanocontracts/test_storage.py @@ -0,0 +1,135 @@ +from typing import TypeVar + +from hathor.nanocontracts.nc_types import NCType, NullNCType, make_nc_type_for_arg_type as make_nc_type +from hathor.nanocontracts.storage import NCChangesTracker +from hathor.nanocontracts.types import Amount, ContractId, Timestamp, VertexId +from tests import unittest + +T = TypeVar('T') + +STR_NC_TYPE = make_nc_type(str) +BYTES_NC_TYPE = make_nc_type(bytes) +INT_NC_TYPE = make_nc_type(int) +BOOL_NC_TYPE = make_nc_type(bool) + + +class NCMemoryStorageTestCase(unittest.TestCase): + def setUp(self) -> None: + from hathor.nanocontracts.storage import NCMemoryStorageFactory + factory = NCMemoryStorageFactory() + block_storage = factory.get_empty_block_storage() + self.storage = block_storage.get_empty_contract_storage(ContractId(VertexId(b''))) + super().setUp() + + def _run_test(self, data_in: T, value: NCType[T]) -> None: + # XXX: maybe make the key random? 
+ key = b'x' + # make sure the key is unused + self.assertFalse(self.storage.has_obj(key)) + # value goes in + self.storage.put_obj(key, value, data_in) + # the key should be present + self.assertTrue(self.storage.has_obj(key)) + # value comes out + data_out = self.storage.get_obj(key, value) + # should be the same + self.assertEqual(data_in, data_out) + # clean up + self.storage.del_obj(key) + # make sure the storage got rid of it + self.assertFalse(self.storage.has_obj(key)) + + def test_str(self) -> None: + self._run_test('nano', STR_NC_TYPE) + + def test_str_empty(self) -> None: + self._run_test('', STR_NC_TYPE) + + def test_bytes(self) -> None: + self._run_test(b'nano', BYTES_NC_TYPE) + + def test_bytes_empty(self) -> None: + self._run_test(b'', BYTES_NC_TYPE) + + def test_int_positive(self) -> None: + self._run_test(123, INT_NC_TYPE) + + def test_int_zero(self) -> None: + self._run_test(0, INT_NC_TYPE) + + def test_int_negative(self) -> None: + self._run_test(-123, INT_NC_TYPE) + + def test_bigint(self) -> None: + self._run_test(2**40, INT_NC_TYPE) + + def test_float(self) -> None: + with self.assertRaises(TypeError): + make_nc_type(float) + with self.assertRaises(TypeError): + # XXX: ignore misc, mypy catches this error but we want to test for it + self._run_test(1.23, INT_NC_TYPE) # type: ignore[misc] + + def test_none(self) -> None: + value = NullNCType() + self._run_test(None, value) + + def test_optional(self) -> None: + value: NCType[int | None] = make_nc_type(int | None) # type: ignore[arg-type] + self._run_test(1, value) + self._run_test(None, value) + + def test_bool_true(self) -> None: + self._run_test(True, BOOL_NC_TYPE) + + def test_bool_false(self) -> None: + self._run_test(False, BOOL_NC_TYPE) + + def test_tuple(self) -> None: + value: NCType[tuple[str, int, set[int], bool]] + value = make_nc_type(tuple[str, int, set[int], bool]) # type: ignore[arg-type] + self._run_test(('str', 1, {3}, True), value) + + def test_changes_tracker_delete(self) -> 
None: + self.storage.put_obj(b'x', INT_NC_TYPE, 1) + changes_tracker = NCChangesTracker(ContractId(VertexId(b'')), self.storage) + self.assertEqual(1, changes_tracker.get_obj(b'x', INT_NC_TYPE)) + + changes_tracker.del_obj(b'x') + # Confirm the key has been deleted. + with self.assertRaises(KeyError): + changes_tracker.get_obj(b'x', INT_NC_TYPE) + # Check that the key has not been deleted on the storage. + self.assertEqual(1, self.storage.get_obj(b'x', INT_NC_TYPE)) + + # Commit changes and confirm the key was deleted on the storage. + changes_tracker.commit() + with self.assertRaises(KeyError): + self.storage.get_obj(b'x', INT_NC_TYPE) + + def test_changes_tracker_early_error(self) -> None: + self.storage.put_obj(b'x', INT_NC_TYPE, 1) + changes_tracker = NCChangesTracker(ContractId(VertexId(b'')), self.storage) + + # changes tracker should fail early when trying to use a value that would fail the serialization + # (internally it effectively serializes that type early) + with self.assertRaises(TypeError): + # 3 is an invalid bool + changes_tracker.put_obj(b'y', BOOL_NC_TYPE, 3) # type: ignore[misc] + + # other examples of failures: + + amount_nc_type = make_nc_type(Amount) + with self.assertRaises(ValueError): + # Amount must be non-negative + changes_tracker.put_obj(b'y', amount_nc_type, -1) # type: ignore[misc] + + timestamp_nc_type = make_nc_type(Timestamp) + with self.assertRaises(ValueError): + # Timestamp uses Int32NCType + changes_tracker.put_obj(b'y', timestamp_nc_type, 2**32) # type: ignore[misc] + + nested_nc_type = make_nc_type(dict[int, set[int]]) + with self.assertRaises(TypeError): + # inner string is not int + changes_tracker.put_obj(b'y', nested_nc_type, {1: {'foo'}}) # type: ignore[misc] diff --git a/tests/nanocontracts/test_syscalls.py b/tests/nanocontracts/test_syscalls.py new file mode 100644 index 000000000..cc14ddc71 --- /dev/null +++ b/tests/nanocontracts/test_syscalls.py @@ -0,0 +1,197 @@ +from typing import Optional + +import pytest + 
+from hathor.conf.settings import HATHOR_TOKEN_UID +from hathor.nanocontracts.blueprint import Blueprint +from hathor.nanocontracts.context import Context +from hathor.nanocontracts.exception import NCInvalidSyscall +from hathor.nanocontracts.nc_types import NCType, make_nc_type_for_arg_type as make_nc_type +from hathor.nanocontracts.storage.contract_storage import Balance, BalanceKey +from hathor.nanocontracts.types import ( + BlueprintId, + ContractId, + NCDepositAction, + NCGrantAuthorityAction, + TokenUid, + public, +) +from tests.nanocontracts.blueprints.unittest import BlueprintTestCase + +CONTRACT_NC_TYPE = make_nc_type(ContractId) +BLUEPRINT_NC_TYPE = make_nc_type(BlueprintId) +OPT_CONTRACT_NC_TYPE: NCType[ContractId | None] = make_nc_type(ContractId | None) # type: ignore[arg-type] +OPT_BLUEPRINT_NC_TYPE: NCType[BlueprintId | None] = make_nc_type(BlueprintId | None) # type: ignore[arg-type] + + +class MyBlueprint(Blueprint): + my_nc_id: ContractId + my_blueprint_id: BlueprintId + + other_nc_id: Optional[ContractId] + other_blueprint_id: Optional[BlueprintId] + + @public + def initialize(self, ctx: Context, other_nc_id: ContractId) -> None: + self.my_nc_id = self.syscall.get_contract_id() + self.my_blueprint_id = self.syscall.get_blueprint_id() + + self.other_nc_id = other_nc_id + self.other_blueprint_id = self.syscall.get_blueprint_id(other_nc_id) + + +class OtherBlueprint(Blueprint): + @public(allow_deposit=True, allow_grant_authority=True) + def initialize(self, ctx: Context) -> None: + pass + + @public(allow_grant_authority=True) + def nop(self, ctx: Context) -> None: + pass + + @public + def revoke(self, ctx: Context, token_uid: TokenUid, revoke_mint: bool, revoke_melt: bool) -> None: + self.syscall.revoke_authorities(token_uid, revoke_mint=revoke_mint, revoke_melt=revoke_melt) + + @public + def mint(self, ctx: Context, token_uid: TokenUid, amount: int) -> None: + self.syscall.mint_tokens(token_uid, amount) + + @public + def melt(self, ctx: Context, 
token_uid: TokenUid, amount: int) -> None: + self.syscall.melt_tokens(token_uid, amount) + + +class NCNanoContractTestCase(BlueprintTestCase): + def setUp(self) -> None: + super().setUp() + + self.my_blueprint_id = self.gen_random_blueprint_id() + self.other_blueprint_id = self.gen_random_blueprint_id() + + self.nc_catalog.blueprints[self.my_blueprint_id] = MyBlueprint + self.nc_catalog.blueprints[self.other_blueprint_id] = OtherBlueprint + + def test_basics(self) -> None: + nc1_id = self.gen_random_contract_id() + nc2_id = self.gen_random_contract_id() + + tx = self.get_genesis_tx() + + ctx = Context([], tx, self.gen_random_address(), timestamp=0) + self.runner.create_contract(nc1_id, self.other_blueprint_id, ctx) + self.runner.create_contract(nc2_id, self.my_blueprint_id, ctx, nc1_id) + + storage2 = self.runner.get_storage(nc2_id) + + assert storage2.get_obj(b'my_nc_id', CONTRACT_NC_TYPE) == nc2_id + assert storage2.get_obj(b'other_nc_id', OPT_CONTRACT_NC_TYPE) == nc1_id + + assert storage2.get_obj(b'my_blueprint_id', BLUEPRINT_NC_TYPE) == self.my_blueprint_id + assert storage2.get_obj(b'other_blueprint_id', OPT_BLUEPRINT_NC_TYPE) == self.other_blueprint_id + + def test_authorities(self) -> None: + nc_id = self.gen_random_contract_id() + token_a_uid = self.gen_random_token_uid() + htr_balance_key = BalanceKey(nc_id=nc_id, token_uid=HATHOR_TOKEN_UID) + tka_balance_key = BalanceKey(nc_id=nc_id, token_uid=token_a_uid) + + ctx_initialize = Context( + actions=[ + NCDepositAction(token_uid=TokenUid(HATHOR_TOKEN_UID), amount=1000), + NCDepositAction(token_uid=token_a_uid, amount=1000), + ], + vertex=self.get_genesis_tx(), + address=self.gen_random_address(), + timestamp=0, + ) + + self.runner.create_contract(nc_id, self.other_blueprint_id, ctx_initialize) + storage = self.runner.get_storage(nc_id) + + ctx_grant = Context( + actions=[NCGrantAuthorityAction(token_uid=token_a_uid, mint=True, melt=True)], + vertex=self.get_genesis_tx(), + address=self.gen_random_address(), 
+ timestamp=0, + ) + self.runner.call_public_method(nc_id, 'nop', ctx_grant) + + ctx = Context( + actions=[], + vertex=self.get_genesis_tx(), + address=self.gen_random_address(), + timestamp=0, + ) + + # Starting state + assert storage.get_all_balances() == { + htr_balance_key: Balance(value=1000, can_mint=False, can_melt=False), + tka_balance_key: Balance(value=1000, can_mint=True, can_melt=True), + } + + # After mint + self.runner.call_public_method(nc_id, 'mint', ctx, token_a_uid, 123) + assert storage.get_all_balances() == { + htr_balance_key: Balance(value=998, can_mint=False, can_melt=False), + tka_balance_key: Balance(value=1123, can_mint=True, can_melt=True), + } + + # After melt + self.runner.call_public_method(nc_id, 'melt', ctx, token_a_uid, 456) + assert storage.get_all_balances() == { + htr_balance_key: Balance(value=1002, can_mint=False, can_melt=False), + tka_balance_key: Balance(value=667, can_mint=True, can_melt=True), + } + + # After revoke mint + self.runner.call_public_method(nc_id, 'revoke', ctx, token_a_uid, True, False) + assert storage.get_all_balances() == { + htr_balance_key: Balance(value=1002, can_mint=False, can_melt=False), + tka_balance_key: Balance(value=667, can_mint=False, can_melt=True), + } + + # After revoke melt + self.runner.call_public_method(nc_id, 'revoke', ctx, token_a_uid, False, True) + assert storage.get_all_balances() == { + htr_balance_key: Balance(value=1002, can_mint=False, can_melt=False), + tka_balance_key: Balance(value=667, can_mint=False, can_melt=False), + } + + # Try revoke mint without having the authority + msg = f'contract {nc_id.hex()} cannot mint {token_a_uid.hex()} tokens' + with pytest.raises(NCInvalidSyscall, match=msg): + self.runner.call_public_method(nc_id, 'revoke', ctx, token_a_uid, True, False) + + # Try revoke melt without having the authority + msg = f'contract {nc_id.hex()} cannot melt {token_a_uid.hex()} tokens' + with pytest.raises(NCInvalidSyscall, match=msg): + 
self.runner.call_public_method(nc_id, 'revoke', ctx, token_a_uid, False, True) + + # Try mint TKA + msg = f'contract {nc_id.hex()} cannot mint {token_a_uid.hex()} tokens' + with pytest.raises(NCInvalidSyscall, match=msg): + self.runner.call_public_method(nc_id, 'mint', ctx, token_a_uid, 123) + + # Try melt TKA + msg = f'contract {nc_id.hex()} cannot melt {token_a_uid.hex()} tokens' + with pytest.raises(NCInvalidSyscall, match=msg): + self.runner.call_public_method(nc_id, 'melt', ctx, token_a_uid, 456) + + # Try mint HTR + with pytest.raises(NCInvalidSyscall, match=f'contract {nc_id.hex()} cannot mint HTR tokens'): + self.runner.call_public_method(nc_id, 'mint', ctx, HATHOR_TOKEN_UID, 123) + + # Try melt HTR + with pytest.raises(NCInvalidSyscall, match=f'contract {nc_id.hex()} cannot melt HTR tokens'): + self.runner.call_public_method(nc_id, 'melt', ctx, HATHOR_TOKEN_UID, 456) + + # Try revoke HTR authorities + with pytest.raises(NCInvalidSyscall, match=f'contract {nc_id.hex()} cannot revoke authorities from HTR token'): + self.runner.call_public_method(nc_id, 'revoke', ctx, HATHOR_TOKEN_UID, True, False) + + # Final state + assert storage.get_all_balances() == { + htr_balance_key: Balance(value=1002, can_mint=False, can_melt=False), + tka_balance_key: Balance(value=667, can_mint=False, can_melt=False), + } diff --git a/tests/nanocontracts/test_syscalls_in_view.py b/tests/nanocontracts/test_syscalls_in_view.py new file mode 100644 index 000000000..3058d4958 --- /dev/null +++ b/tests/nanocontracts/test_syscalls_in_view.py @@ -0,0 +1,167 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest + +from hathor.nanocontracts import Blueprint, Context, public, view +from hathor.nanocontracts.blueprint_env import BlueprintEnvironment +from hathor.nanocontracts.exception import NCViewMethodError +from hathor.nanocontracts.types import BlueprintId, ContractId, NCRawArgs, TokenUid, VertexId +from tests.nanocontracts.blueprints.unittest import BlueprintTestCase + + +class MyBlueprint(Blueprint): + other_id: ContractId | None + + @public + def initialize(self, ctx: Context, other_id: ContractId | None) -> None: + self.other_id = other_id + + @view + def nop(self) -> None: + pass + + @view + def test_rng(self) -> None: + self.syscall.rng.randbits(1) + + @view + def get_contract_id(self) -> None: + self.syscall.get_contract_id() + + @view + def get_blueprint_id(self) -> None: + self.syscall.get_blueprint_id() + + @view + def get_balance_before_current_call(self) -> None: + self.syscall.get_balance_before_current_call() + + @view + def get_current_balance(self) -> None: + self.syscall.get_current_balance() + + @view + def can_mint(self) -> None: + self.syscall.can_mint(TokenUid(b'')) + + @view + def can_mint_before_current_call(self) -> None: + self.syscall.can_mint_before_current_call(TokenUid(b'')) + + @view + def can_melt(self) -> None: + self.syscall.can_melt(TokenUid(b'')) + + @view + def can_melt_before_current_call(self) -> None: + self.syscall.can_melt_before_current_call(TokenUid(b'')) + + @view + def call_public_method(self) -> None: + self.syscall.call_public_method(ContractId(VertexId(b'')), '', []) + + @view + def 
call_view_method(self) -> None: + assert self.other_id is not None + self.syscall.call_view_method(self.other_id, 'nop') + + @view + def revoke_authorities(self) -> None: + self.syscall.revoke_authorities(TokenUid(b''), revoke_mint=True, revoke_melt=True) + + @view + def mint_tokens(self) -> None: + self.syscall.mint_tokens(TokenUid(b''), 0) + + @view + def melt_tokens(self) -> None: + self.syscall.melt_tokens(TokenUid(b''), 0) + + @view + def create_contract(self) -> None: + self.syscall.create_contract(BlueprintId(VertexId(b'')), b'', []) + + @view + def emit_event(self) -> None: + self.syscall.emit_event(b'') + + @view + def create_token(self) -> None: + self.syscall.create_token('', '', 0) + + @view + def proxy_call_public_method(self) -> None: + self.syscall.proxy_call_public_method(BlueprintId(VertexId(b'')), '', []) + + @view + def proxy_call_public_method_nc_args(self) -> None: + nc_args = NCRawArgs(b'') + self.syscall.proxy_call_public_method_nc_args(BlueprintId(VertexId(b'')), '', [], nc_args) + + @view + def change_blueprint(self) -> None: + self.syscall.change_blueprint(BlueprintId(VertexId(b''))) + + +class TestSyscallsInView(BlueprintTestCase): + def setUp(self) -> None: + super().setUp() + + self.blueprint_id = self._register_blueprint_class(MyBlueprint) + + self.ctx = Context( + actions=[], + vertex=self.get_genesis_tx(), + address=self.gen_random_address(), + timestamp=self.now, + ) + + def test_rng(self) -> None: + contract_id = self.gen_random_contract_id() + self.runner.create_contract(contract_id, self.blueprint_id, self.ctx, None) + + with pytest.raises(NCViewMethodError, match='@view method cannot call `syscall.rng`'): + self.runner.call_view_method(contract_id, 'test_rng') + + def test_syscalls(self) -> None: + other_id = self.gen_random_contract_id() + self.runner.create_contract(other_id, self.blueprint_id, self.ctx, None) + + properties = {'rng'} # each property must be tested specifically + allowed_view_syscalls = { + 'get_contract_id', 
+ 'get_blueprint_id', + 'get_balance', + 'get_balance_before_current_call', + 'get_current_balance', + 'can_mint', + 'can_mint_before_current_call', + 'can_melt', + 'can_melt_before_current_call', + 'call_view_method', + } + + for method_name, method in BlueprintEnvironment.__dict__.items(): + if '__' in method_name or method_name in properties: + continue + + contract_id = self.gen_random_contract_id() + self.runner.create_contract(contract_id, self.blueprint_id, self.ctx, other_id) + + if method_name in allowed_view_syscalls: + self.runner.call_view_method(contract_id, method_name) + else: + with pytest.raises(NCViewMethodError, match=f'@view method cannot call `syscall.{method_name}`'): + self.runner.call_view_method(contract_id, method_name) diff --git a/tests/nanocontracts/test_token_creation.py b/tests/nanocontracts/test_token_creation.py new file mode 100644 index 000000000..80559cd9b --- /dev/null +++ b/tests/nanocontracts/test_token_creation.py @@ -0,0 +1,242 @@ + +from hathor.conf import HathorSettings +from hathor.nanocontracts import NC_EXECUTION_FAIL_ID +from hathor.nanocontracts.blueprint import Blueprint +from hathor.nanocontracts.catalog import NCBlueprintCatalog +from hathor.nanocontracts.context import Context +from hathor.nanocontracts.nc_exec_logs import NCLogConfig +from hathor.nanocontracts.storage.contract_storage import Balance, BalanceKey +from hathor.nanocontracts.types import ContractId, NCWithdrawalAction, TokenUid, VertexId, public +from hathor.nanocontracts.utils import derive_child_token_id +from hathor.transaction import Block, Transaction +from hathor.transaction.nc_execution_state import NCExecutionState +from hathor.transaction.token_creation_tx import TokenDescription +from tests import unittest +from tests.dag_builder.builder import TestDAGBuilder +from tests.nanocontracts.utils import assert_nc_failure_reason + +settings = HathorSettings() + + +class MyBlueprint(Blueprint): + a: str + b: int + + @public(allow_deposit=True) + 
def initialize(self, ctx: Context) -> None: + pass + + @public(allow_withdrawal=True) + def withdraw(self, ctx: Context) -> None: + pass + + @public(allow_deposit=True) + def create_token( + self, + ctx: Context, + token_name: str, + token_symbol: str, + amount: int, + mint_authority: bool, + melt_authority: bool, + ) -> None: + self.syscall.create_token(token_name, token_symbol, amount, mint_authority, melt_authority) + + +class NCNanoContractTestCase(unittest.TestCase): + def setUp(self): + super().setUp() + + self.myblueprint_id = b'x' * 32 + self.catalog = NCBlueprintCatalog({ + self.myblueprint_id: MyBlueprint + }) + + self.manager = self.create_peer('unittests', nc_log_config=NCLogConfig.FAILED, wallet_index=True) + self.manager.tx_storage.nc_catalog = self.catalog + + def test_token_creation_by_vertex(self) -> None: + dag_builder = TestDAGBuilder.from_manager(self.manager) + vertices = dag_builder.build_from_str(f''' + blockchain genesis b[1..40] + b30 < dummy + + tx1.nc_id = "{self.myblueprint_id.hex()}" + tx1.nc_method = initialize() + tx1.nc_deposit = 1 HTR + + tx2.out[0] = 10 HTR + tx2.out[1] = 100 TKA # call a method of an existing contract + tx2.out[2] = 150 ABC # ABC is a token created w/out using nano headers + tx2.out[3] = 250 DEF # create a new contract, no deposits + tx2.out[4] = 350 GHI # create a new contract, depositing 10 HTR into it + tx2.out[5] = 450 JKL # call a method of an existing contract with partial withdrawal + + tx3.out[1] = 200 TKB + + TKA.nc_id = tx1 + TKA.nc_method = withdraw() + TKA.nc_withdrawal = 1 HTR + + DEF.nc_id = "{self.myblueprint_id.hex()}" + DEF.nc_method = initialize() + + GHI.nc_id = "{self.myblueprint_id.hex()}" + GHI.nc_method = initialize() + GHI.nc_deposit = 10 HTR + + # JKL needs to deposit 5 HTR to create 450 JKL tokens. 
+ # - 3 HTR will be covered by a withdrawal from a contract + # - 2 HTR will be covered by inputs + JKL.nc_id = GHI + JKL.nc_method = withdraw() + JKL.nc_withdrawal = 3 HTR + + TKB.nc_id = tx1 + TKB.nc_method = withdraw() + TKB.nc_withdrawal = 2 HTR + + TKA < TKB + + b31 --> tx1 + b32 --> tx2 + b33 --> tx3 + ''') + + vertices.propagate_with(self.manager, up_to='b31') + tx1, = vertices.get_typed_vertices(['tx1'], Transaction) + + nc_storage = self.manager.get_best_block_nc_storage(tx1.hash) + assert tx1.is_nano_contract() + assert nc_storage.get_balance(settings.HATHOR_TOKEN_UID) == Balance(value=1, can_mint=False, can_melt=False) + + vertices.propagate_with(self.manager, up_to='b32') + TKA, ABC, DEF, GHI, JKL, tx2 = vertices.get_typed_vertices( + ['TKA', 'ABC', 'DEF', 'GHI', 'JKL', 'tx2'], + Transaction + ) + + assert not ABC.is_nano_contract() + assert TKA.get_metadata().voided_by is None + + assert TKA.is_nano_contract() + assert TKA.get_metadata().voided_by is None + + assert DEF.is_nano_contract() + assert DEF.get_metadata().voided_by is None + + assert GHI.is_nano_contract() + assert GHI.get_metadata().voided_by is None + + assert JKL.is_nano_contract() + assert JKL.get_metadata().voided_by is None + + nc_storage = self.manager.get_best_block_nc_storage(tx1.hash) + assert nc_storage.get_balance(settings.HATHOR_TOKEN_UID) == Balance(value=0, can_mint=False, can_melt=False) + + ghi_nc_storage = self.manager.get_best_block_nc_storage(GHI.hash) + assert ghi_nc_storage.get_balance(settings.HATHOR_TOKEN_UID) == ( + Balance(value=7, can_mint=False, can_melt=False) + ) + + jkl_token_info = JKL._get_token_info_from_inputs() + JKL._update_token_info_from_outputs(token_dict=jkl_token_info) + assert jkl_token_info[settings.HATHOR_TOKEN_UID].amount == -2 + + jkl_context = JKL.get_nano_header().get_context() + htr_token_uid = TokenUid(settings.HATHOR_TOKEN_UID) + assert jkl_context.actions[htr_token_uid] == (NCWithdrawalAction(token_uid=htr_token_uid, amount=3),) + + assert 
not tx2.is_nano_contract() + assert tx2.get_metadata().voided_by is None + + vertices.propagate_with(self.manager) + TKB, tx3 = vertices.get_typed_vertices(['TKB', 'tx3'], Transaction) + + nc_storage = self.manager.get_best_block_nc_storage(tx1.hash) + assert nc_storage.get_balance(settings.HATHOR_TOKEN_UID) == Balance(value=0, can_mint=False, can_melt=False) + + assert TKB.is_nano_contract() + assert TKB.get_metadata().voided_by == {TKB.hash, NC_EXECUTION_FAIL_ID} + + assert not tx3.is_nano_contract() + assert tx3.get_metadata().voided_by == {TKB.hash} + + def test_token_creation_by_contract(self) -> None: + token_symbol = 'TKA' + + dag_builder = TestDAGBuilder.from_manager(self.manager) + vertices = dag_builder.build_from_str(f''' + blockchain genesis b[1..40] + b30 < dummy + + tx1.nc_id = "{self.myblueprint_id.hex()}" + tx1.nc_method = initialize() + + tx2.nc_id = tx1 + tx2.nc_method = create_token("MyToken", "{token_symbol}", 100, false, false) + tx2.nc_deposit = 3 HTR + + tx3.nc_id = tx1 + tx3.nc_method = create_token("MyToken (2)", "{token_symbol}", 50, true, false) + tx3.nc_deposit = 1 HTR + + tx2 < tx3 + + b31 --> tx1 + b31 --> tx2 + b32 --> tx3 + ''') + + vertices.propagate_with(self.manager) + + tx1, tx2, tx3 = vertices.get_typed_vertices(['tx1', 'tx2', 'tx3'], Transaction) + b31, b32 = vertices.get_typed_vertices(['b31', 'b32'], Block) + + # Uncomment for debugging: + # from tests.nanocontracts.utils import get_nc_failure_entry + # failure_entry = get_nc_failure_entry(manager=self.manager, tx_id=tx2.hash, block_id=b31.hash) + # print(failure_entry.error_traceback) + + assert tx1.get_metadata().voided_by is None + assert tx1.get_metadata().nc_execution is NCExecutionState.SUCCESS + + assert tx2.get_metadata().voided_by is None + assert tx2.get_metadata().nc_execution is NCExecutionState.SUCCESS + + assert tx3.get_metadata().voided_by == {tx3.hash, NC_EXECUTION_FAIL_ID} + assert tx3.get_metadata().nc_execution is NCExecutionState.FAILURE + + assert 
b31.get_metadata().voided_by is None + assert b32.get_metadata().voided_by is None + + assert_nc_failure_reason( + manager=self.manager, + tx_id=tx3.hash, + block_id=b32.hash, + reason='NCTokenAlreadyExists', + ) + + child_token_id = derive_child_token_id(ContractId(VertexId(tx1.hash)), token_symbol) + child_token_balance_key = BalanceKey(nc_id=tx1.hash, token_uid=child_token_id) + htr_balance_key = BalanceKey(nc_id=tx1.hash, token_uid=settings.HATHOR_TOKEN_UID) + + block_storage = self.manager.get_nc_block_storage(b31) + expected_token_info = TokenDescription( + token_id=child_token_id, + token_name='MyToken', + token_symbol=token_symbol, + ) + assert block_storage.get_token_description(child_token_id) == expected_token_info + + nc_storage = block_storage.get_contract_storage(tx1.hash) + assert nc_storage.get_all_balances() == { + child_token_balance_key: Balance(value=100, can_mint=False, can_melt=False), + htr_balance_key: Balance(value=2, can_mint=False, can_melt=False), + } + + tokens_index = self.manager.tx_storage.indexes.tokens + assert tokens_index.get_token_info(settings.HATHOR_TOKEN_UID).get_total() == ( + settings.GENESIS_TOKENS + 40 * settings.INITIAL_TOKENS_PER_BLOCK - 1 + ) + assert tokens_index.get_token_info(child_token_id).get_total() == 100 diff --git a/tests/nanocontracts/test_types.py b/tests/nanocontracts/test_types.py new file mode 100644 index 000000000..3bbea685e --- /dev/null +++ b/tests/nanocontracts/test_types.py @@ -0,0 +1,45 @@ +from hathor.nanocontracts.types import ContractId, SignedData, VertexId +from hathor.transaction.scripts import P2PKH +from tests import unittest + + +class BaseNanoContractTestCase(unittest.TestCase): + def test_signed(self) -> None: + from hathor.wallet import KeyPair + + nc_id = ContractId(VertexId(b'x' * 32)) + + result = b'1x1' + signed_result = SignedData[bytes](result, b'') + result_bytes = signed_result.get_data_bytes(nc_id) + + # Check signature using oracle's private key. 
+ key = KeyPair.create(b'123') + assert key.address is not None + script_input = key.p2pkh_create_input_data(b'123', result_bytes) + signed_result = SignedData[bytes](result, script_input) + + p2pkh = P2PKH(key.address) + oracle_script = p2pkh.get_script() + self.assertTrue(signed_result.checksig(nc_id, oracle_script)) + + # Try to tamper with the data. + fake_result = b'2x2' + self.assertNotEqual(result, fake_result) + invalid_signed_result = SignedData[bytes](fake_result, script_input) + self.assertFalse(invalid_signed_result.checksig(nc_id, oracle_script)) + + # Try to use the wrong private key to sign the data. + key2 = KeyPair.create(b'456') + assert key2.address is not None + p2pkh2 = P2PKH(key2.address) + oracle_script2 = p2pkh2.get_script() + self.assertFalse(signed_result.checksig(nc_id, oracle_script2)) + + def test_signed_eq(self): + x = SignedData[str]('data', b'signature') + + self.assertEqual(x, SignedData[str]('data', b'signature')) + self.assertNotEqual(x, SignedData[str]('data', b'another-signature')) + self.assertNotEqual(x, SignedData[str]('another-data', 'signature')) + self.assertNotEqual(x, SignedData[str]('another-data', 'another-signature')) diff --git a/tests/nanocontracts/test_types_across_contracts.py b/tests/nanocontracts/test_types_across_contracts.py new file mode 100644 index 000000000..eb1fb4e94 --- /dev/null +++ b/tests/nanocontracts/test_types_across_contracts.py @@ -0,0 +1,173 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest + +from hathor.nanocontracts import Blueprint, Context, NCFail, public +from hathor.nanocontracts.types import ContractId, NCArgs, fallback, view +from tests.nanocontracts.blueprints.unittest import BlueprintTestCase + + +class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @public + def public_method(self, ctx: Context, a: int) -> None: + pass + + @public + def public_method_wrong_return_type(self, ctx: Context) -> int: + return 'abc' # type: ignore[return-value] + + @view + def view_method(self, a: int) -> None: + pass + + @view + def view_method_wrong_return_type(self) -> int: + return 'abc' # type: ignore[return-value] + + @public + def call_public_wrong_arg_type(self, ctx: Context, other_id: ContractId) -> None: + self.syscall.call_public_method(other_id, 'public_method', [], 'abc') + + @public + def call_public_wrong_kwarg_type(self, ctx: Context, other_id: ContractId) -> None: + self.syscall.call_public_method(other_id, 'public_method', [], a='abc') + + @public + def call_public_wrong_return_type(self, ctx: Context, other_id: ContractId) -> None: + self.syscall.call_public_method(other_id, 'public_method_wrong_return_type', []) + + @view + def call_view_wrong_arg_type(self, other_id: ContractId) -> None: + self.syscall.call_view_method(other_id, 'view_method', 'abc') + + @view + def call_view_wrong_kwarg_type(self, other_id: ContractId) -> None: + self.syscall.call_view_method(other_id, 'view_method', a='abc') + + @view + def call_view_wrong_return_type(self, other_id: ContractId) -> None: + self.syscall.call_view_method(other_id, 'view_method_wrong_return_type') + + @fallback + def fallback(self, ctx: Context, method_name: str, nc_args: NCArgs) -> int: + return 'abc' # type: ignore[return-value] + + @public + def call_mutate_list(self, ctx: Context, other_id: ContractId) -> None: + items = [1, 
2, 3] + self.syscall.call_public_method(other_id, 'mutate_list', [], items) + assert items == [1, 2, 3] + + @public + def mutate_list(self, ctx: Context, items: list[int]) -> None: + assert items == [1, 2, 3] + items.append(4) + assert items == [1, 2, 3, 4] + + +class TestTypesAcrossContracts(BlueprintTestCase): + def setUp(self) -> None: + super().setUp() + self.blueprint_id = self._register_blueprint_class(MyBlueprint) + self.contract_id1 = self.gen_random_contract_id() + self.contract_id2 = self.gen_random_contract_id() + self.runner.create_contract(self.contract_id1, self.blueprint_id, self.create_context()) + self.runner.create_contract(self.contract_id2, self.blueprint_id, self.create_context()) + + def test_public_wrong_arg_type(self) -> None: + with pytest.raises(NCFail) as e: + self.runner.call_public_method( + self.contract_id1, + 'call_public_wrong_arg_type', + self.create_context(), + self.contract_id2, + ) + assert isinstance(e.value.__cause__, TypeError) + assert e.value.__cause__.args[0] == 'expected integer' + + def test_public_wrong_kwarg_type(self) -> None: + with pytest.raises(NCFail) as e: + self.runner.call_public_method( + self.contract_id1, + 'call_public_wrong_kwarg_type', + self.create_context(), + self.contract_id2, + ) + assert isinstance(e.value.__cause__, TypeError) + assert e.value.__cause__.args[0] == 'expected integer' + + def test_public_wrong_return_type(self) -> None: + with pytest.raises(NCFail) as e: + self.runner.call_public_method( + self.contract_id1, + 'call_public_wrong_return_type', + self.create_context(), + self.contract_id2, + ) + assert isinstance(e.value.__cause__, TypeError) + assert e.value.__cause__.args[0] == 'expected integer' + + def test_view_wrong_arg_type(self) -> None: + with pytest.raises(NCFail) as e: + self.runner.call_view_method( + self.contract_id1, + 'call_view_wrong_arg_type', + self.contract_id2, + ) + assert isinstance(e.value.__cause__, TypeError) + assert e.value.__cause__.args[0] == 'expected 
integer' + + def test_view_wrong_kwarg_type(self) -> None: + with pytest.raises(NCFail) as e: + self.runner.call_view_method( + self.contract_id1, + 'call_view_wrong_kwarg_type', + self.contract_id2, + ) + assert isinstance(e.value.__cause__, TypeError) + assert e.value.__cause__.args[0] == 'expected integer' + + def test_view_wrong_return_type(self) -> None: + with pytest.raises(NCFail) as e: + self.runner.call_view_method( + self.contract_id1, + 'call_view_wrong_return_type', + self.contract_id2, + ) + assert isinstance(e.value.__cause__, TypeError) + assert e.value.__cause__.args[0] == 'expected integer' + + def test_fallback_wrong_return_type(self) -> None: + with pytest.raises(NCFail) as e: + self.runner.call_public_method( + self.contract_id1, + 'unknown', + self.create_context(), + self.contract_id2, + ) + assert isinstance(e.value.__cause__, TypeError) + assert e.value.__cause__.args[0] == 'expected integer' + + def test_arg_mutation(self) -> None: + self.runner.call_public_method( + self.contract_id1, + 'call_mutate_list', + self.create_context(), + self.contract_id2, + ) diff --git a/tests/nanocontracts/test_violations.py b/tests/nanocontracts/test_violations.py new file mode 100644 index 000000000..c17cb0cac --- /dev/null +++ b/tests/nanocontracts/test_violations.py @@ -0,0 +1,81 @@ +from hathor.nanocontracts import Blueprint, public +from hathor.nanocontracts.context import Context +from hathor.nanocontracts.exception import NCFail +from hathor.nanocontracts.types import NCDepositAction +from tests.nanocontracts.blueprints.unittest import BlueprintTestCase + + +class MyBlueprint(Blueprint): + total: int + + @public + def initialize(self, ctx: Context) -> None: + self.total = 3 + + @public + def modify_actions(self, ctx: Context) -> None: + ctx.actions[b'\00'] = NCDepositAction(token_uid=b'\00', amount=1_000) # type: ignore + + @public + def modify_vertex(self, ctx: Context) -> None: + ctx.vertex.inputs[0] = None # type: ignore + + @public + def 
assign_declared_attribute(self, ctx: Context) -> None: + self.total += 1 + + @public + def assign_non_declared_attribute(self, ctx: Context) -> None: + self.unknown = 1 + + +class ViolationsTestCase(BlueprintTestCase): + def setUp(self): + super().setUp() + + self.blueprint_id = self.gen_random_blueprint_id() + self.contract_id = self.gen_random_contract_id() + self.nc_catalog.blueprints[self.blueprint_id] = MyBlueprint + self.tx = self.get_genesis_tx() + self.address = self.gen_random_address() + + def test_modify_actions(self) -> None: + context = Context( + actions=[], + vertex=self.tx, + address=self.address, + timestamp=self.now + ) + self.runner.create_contract(self.contract_id, self.blueprint_id, context) + + with self.assertRaises(NCFail) as cm: + self.runner.call_public_method(self.contract_id, 'modify_actions', context) + exc = cm.exception + self.assertIsInstance(exc.__cause__, TypeError) + + def test_modify_vertex(self) -> None: + context = Context( + actions=[], + vertex=self.tx, + address=self.address, + timestamp=self.now + ) + self.runner.create_contract(self.contract_id, self.blueprint_id, context) + with self.assertRaises(NCFail) as cm: + self.runner.call_public_method(self.contract_id, 'modify_vertex', context) + exc = cm.exception + self.assertIsInstance(exc.__cause__, TypeError) + + def test_assign_non_declared_attribute(self) -> None: + context = Context( + actions=[], + vertex=self.tx, + address=self.address, + timestamp=self.now + ) + self.runner.create_contract(self.contract_id, self.blueprint_id, context) + self.runner.call_public_method(self.contract_id, 'assign_declared_attribute', context) + with self.assertRaises(NCFail) as cm: + self.runner.call_public_method(self.contract_id, 'assign_non_declared_attribute', context) + exc = cm.exception + self.assertIsInstance(exc.__cause__, AttributeError) diff --git a/tests/nanocontracts/utils.py b/tests/nanocontracts/utils.py new file mode 100644 index 000000000..2f8780150 --- /dev/null +++ 
b/tests/nanocontracts/utils.py @@ -0,0 +1,100 @@ +from typing import Any + +from hathor.conf.settings import HathorSettings +from hathor.manager import HathorManager +from hathor.nanocontracts import Blueprint +from hathor.nanocontracts.method import Method +from hathor.nanocontracts.nc_exec_logs import NCExecEntry, NCLogConfig +from hathor.nanocontracts.runner import Runner +from hathor.nanocontracts.storage import NCBlockStorage, NCStorageFactory +from hathor.nanocontracts.utils import sign_pycoin +from hathor.reactor import ReactorProtocol +from hathor.transaction import Transaction +from hathor.transaction.headers.nano_header import NanoHeader, NanoHeaderAction +from hathor.transaction.storage import TransactionStorage +from hathor.types import VertexId +from hathor.util import not_none +from hathor.wallet import HDWallet + + +class TestRunner(Runner): + __test__ = False + + def __init__( + self, + tx_storage: TransactionStorage, + storage_factory: NCStorageFactory, + block_storage: NCBlockStorage, + *, + settings: HathorSettings, + reactor: ReactorProtocol, + seed: bytes | None = None, + ) -> None: + if seed is None: + seed = b'x' * 32 + super().__init__( + tx_storage=tx_storage, + storage_factory=storage_factory, + block_storage=block_storage, + settings=settings, + reactor=reactor, + seed=seed, + ) + + +def get_nc_failure_entry(*, manager: HathorManager, tx_id: VertexId, block_id: VertexId) -> NCExecEntry: + """Return the failure entry for a nano execution.""" + nc_log_storage = manager.consensus_algorithm.block_algorithm_factory.nc_log_storage + assert nc_log_storage._config in {NCLogConfig.ALL, NCLogConfig.FAILED}, ( + 'to get NCFail reason, NC logs must be enabled' + ) + logs = not_none(nc_log_storage.get_logs(tx_id, block_id=block_id)) + return logs.entries[block_id][-1] + + +def assert_nc_failure_reason(*, manager: HathorManager, tx_id: VertexId, block_id: VertexId, reason: str) -> None: + """A function to assert NCFail reason in tests by inspecting NC 
logs.""" + failure_entry = get_nc_failure_entry(manager=manager, tx_id=tx_id, block_id=block_id) + assert failure_entry.error_traceback is not None, 'no error found' + assert reason in failure_entry.error_traceback, ( + f'reason not found in nano error traceback\n\n' + f'expected: "{reason}"\n' + f'found:\n\n' + f'{failure_entry.error_traceback}' + ) + + +def set_nano_header( + *, + tx: Transaction, + wallet: HDWallet, + nc_id: VertexId, + nc_actions: list[NanoHeaderAction] | None = None, + nc_method: str | None = None, + nc_args: tuple[Any, ...] | None = None, + blueprint: type[Blueprint] | None = None, + seqnum: int = 1, +) -> None: + """Configure a nano header for a tx.""" + assert len(tx.headers) == 0 + privkey = wallet.get_key_at_index(0) + + nc_args_bytes = b'\x00' + if nc_args is not None: + assert nc_method is not None + method_parser = Method.from_callable(getattr(blueprint, nc_method)) + nc_args_bytes = method_parser.serialize_args_bytes(nc_args) + + nano_header = NanoHeader( + tx=tx, + nc_seqnum=seqnum, + nc_id=nc_id, + nc_method=nc_method if nc_method is not None else 'nop', + nc_args_bytes=nc_args_bytes, + nc_address=b'', + nc_script=b'', + nc_actions=nc_actions if nc_actions is not None else [], + ) + + sign_pycoin(nano_header, privkey) + tx.headers.append(nano_header) diff --git a/tests/others/test_builder.py b/tests/others/test_builder.py index b17b7c8c4..91f274d12 100644 --- a/tests/others/test_builder.py +++ b/tests/others/test_builder.py @@ -7,7 +7,6 @@ def setUp(self): super().setUp() self.reactor = self.clock self.builder = TestBuilder() - self.builder.use_memory() def test_multiple_calls_to_build(self): self.builder.build() diff --git a/tests/others/test_cli_builder.py b/tests/others/test_cli_builder.py index 68d1e3529..29d4e65b6 100644 --- a/tests/others/test_cli_builder.py +++ b/tests/others/test_cli_builder.py @@ -3,13 +3,13 @@ from hathor.builder import CliBuilder, ResourcesBuilder from hathor.cli.run_node_args import RunNodeArgs from 
hathor.event import EventManager -from hathor.event.storage import EventMemoryStorage, EventRocksDBStorage +from hathor.event.storage import EventRocksDBStorage from hathor.event.websocket import EventWebsocketFactory from hathor.exception import BuilderError -from hathor.indexes import MemoryIndexesManager, RocksDBIndexesManager +from hathor.indexes import RocksDBIndexesManager from hathor.manager import HathorManager from hathor.p2p.sync_version import SyncVersion -from hathor.transaction.storage import TransactionCacheStorage, TransactionMemoryStorage, TransactionRocksDBStorage +from hathor.transaction.storage import TransactionCacheStorage, TransactionRocksDBStorage from hathor.wallet import HDWallet, Wallet from tests import unittest @@ -44,7 +44,7 @@ def _build(self, cmd_args: list[str]) -> HathorManager: return manager def test_empty(self): - self._build_with_error([], '--data is expected') + self._build_with_error([], 'either --data or --temp-data is expected') def test_all_default(self): data_dir = self.mkdtemp() @@ -66,73 +66,51 @@ def test_disable_cache_storage(self): self.assertIsInstance(manager.tx_storage, TransactionRocksDBStorage) self.assertIsInstance(manager.tx_storage.indexes, RocksDBIndexesManager) - def test_default_storage_memory_indexes(self): - data_dir = self.mkdtemp() - manager = self._build(['--memory-indexes', '--data', data_dir]) - self.assertIsInstance(manager.tx_storage, TransactionCacheStorage) - self.assertIsInstance(manager.tx_storage.store, TransactionRocksDBStorage) - self.assertIsInstance(manager.tx_storage.indexes, MemoryIndexesManager) - - def test_default_storage_with_rocksdb_indexes(self): - data_dir = self.mkdtemp() - manager = self._build(['--x-rocksdb-indexes', '--data', data_dir]) - self.assertIsInstance(manager.tx_storage, TransactionCacheStorage) - self.assertIsInstance(manager.tx_storage.store, TransactionRocksDBStorage) - self.assertIsInstance(manager.tx_storage.indexes, RocksDBIndexesManager) - def 
test_rocksdb_storage(self): data_dir = self.mkdtemp() - manager = self._build(['--rocksdb-storage', '--data', data_dir]) + manager = self._build(['--data', data_dir]) self.assertIsInstance(manager.tx_storage, TransactionCacheStorage) self.assertIsInstance(manager.tx_storage.store, TransactionRocksDBStorage) self.assertIsInstance(manager.tx_storage.indexes, RocksDBIndexesManager) - def test_memory_storage(self): - manager = self._build(['--memory-storage']) - self.assertIsInstance(manager.tx_storage, TransactionMemoryStorage) - self.assertIsInstance(manager.tx_storage.indexes, MemoryIndexesManager) - - def test_memory_storage_with_rocksdb_indexes(self): - self._build_with_error(['--memory-storage', '--x-rocksdb-indexes'], 'RocksDB indexes require RocksDB data') - def test_sync_default(self): - manager = self._build(['--memory-storage']) + manager = self._build(['--temp-data']) self.assertFalse(manager.connections.is_sync_version_enabled(SyncVersion.V1_1)) self.assertTrue(manager.connections.is_sync_version_enabled(SyncVersion.V2)) def test_sync_bridge(self): - self._build_with_error(['--memory-storage', '--x-sync-bridge'], '--x-sync-bridge was removed') + self._build_with_error(['--temp-data', '--x-sync-bridge'], '--x-sync-bridge was removed') def test_sync_bridge2(self): - self._build_with_error(['--memory-storage', '--sync-bridge'], '--sync-bridge was removed') + self._build_with_error(['--temp-data', '--sync-bridge'], '--sync-bridge was removed') def test_sync_v2_only(self): - manager = self._build(['--memory-storage', '--x-sync-v2-only']) + manager = self._build(['--temp-data', '--x-sync-v2-only']) self.assertFalse(manager.connections.is_sync_version_enabled(SyncVersion.V1_1)) self.assertTrue(manager.connections.is_sync_version_enabled(SyncVersion.V2)) def test_sync_v2_only2(self): - manager = self._build(['--memory-storage', '--sync-v2-only']) + manager = self._build(['--temp-data', '--sync-v2-only']) 
self.assertFalse(manager.connections.is_sync_version_enabled(SyncVersion.V1_1)) self.assertTrue(manager.connections.is_sync_version_enabled(SyncVersion.V2)) def test_sync_v1_only(self): - self._build_with_error(['--memory-storage', '--sync-v1-only'], '--sync-v1-only was removed') + self._build_with_error(['--temp-data', '--sync-v1-only'], '--sync-v1-only was removed') def test_keypair_wallet(self): - manager = self._build(['--memory-storage', '--wallet', 'keypair']) + manager = self._build(['--temp-data', '--wallet', 'keypair']) self.assertIsInstance(manager.wallet, Wallet) def test_hd_wallet(self): - manager = self._build(['--memory-storage', '--wallet', 'hd']) + manager = self._build(['--temp-data', '--wallet', 'hd']) self.assertIsInstance(manager.wallet, HDWallet) def test_invalid_wallet(self): - self._build_with_error(['--memory-storage', '--wallet', 'invalid-wallet'], 'Invalid type of wallet') + self._build_with_error(['--temp-data', '--wallet', 'invalid-wallet'], 'Invalid type of wallet') def test_status(self): self._build([ - '--memory-storage', + '--temp-data', '--status', '8080', '--utxo-index', '--enable-debug-api', @@ -142,7 +120,7 @@ def test_status(self): self.clean_pending(required_to_quiesce=False) def test_prometheus_no_data(self): - args = ['--memory-storage', '--prometheus'] + args = ['--temp-data', '--prometheus'] self._build_with_error(args, 'To run prometheus exporter you must have a data path') def test_prometheus(self): @@ -151,24 +129,11 @@ def test_prometheus(self): self.assertTrue(self.resources_builder._built_prometheus) self.clean_pending(required_to_quiesce=False) - def test_memory_and_rocksdb_indexes(self): - data_dir = self.mkdtemp() - args = ['--memory-indexes', '--x-rocksdb-indexes', '--data', data_dir] - self._build_with_error(args, 'You cannot use --memory-indexes and --x-rocksdb-indexes.') - def test_event_queue_with_rocksdb_storage(self): data_dir = self.mkdtemp() - manager = self._build(['--x-enable-event-queue', 
'--rocksdb-storage', '--data', data_dir]) + manager = self._build(['--x-enable-event-queue', '--data', data_dir]) self.assertIsInstance(manager._event_manager, EventManager) self.assertIsInstance(manager._event_manager._event_storage, EventRocksDBStorage) self.assertIsInstance(manager._event_manager._event_ws_factory, EventWebsocketFactory) self.assertTrue(manager._enable_event_queue) - - def test_event_queue_with_memory_storage(self): - manager = self._build(['--x-enable-event-queue', '--memory-storage']) - - self.assertIsInstance(manager._event_manager, EventManager) - self.assertIsInstance(manager._event_manager._event_storage, EventMemoryStorage) - self.assertIsInstance(manager._event_manager._event_ws_factory, EventWebsocketFactory) - self.assertTrue(manager._enable_event_queue) diff --git a/tests/others/test_hathor_settings.py b/tests/others/test_hathor_settings.py index ba8a258e6..a69107b69 100644 --- a/tests/others/test_hathor_settings.py +++ b/tests/others/test_hathor_settings.py @@ -20,17 +20,10 @@ from pydantic import ValidationError from hathor.checkpoint import Checkpoint -from hathor.conf import ( - MAINNET_SETTINGS_FILEPATH, - NANO_TESTNET_SETTINGS_FILEPATH, - TESTNET_SETTINGS_FILEPATH, - UNITTESTS_SETTINGS_FILEPATH, -) +from hathor.conf import MAINNET_SETTINGS_FILEPATH, TESTNET_SETTINGS_FILEPATH from hathor.conf.mainnet import SETTINGS as MAINNET_SETTINGS -from hathor.conf.nano_testnet import SETTINGS as NANO_TESTNET_SETTINGS from hathor.conf.settings import DECIMAL_PLACES, GENESIS_TOKEN_UNITS, GENESIS_TOKENS, HathorSettings from hathor.conf.testnet import SETTINGS as TESTNET_SETTINGS -from hathor.conf.unittests import SETTINGS as UNITTESTS_SETTINGS @pytest.mark.parametrize('filepath', ['fixtures/valid_hathor_settings_fixture.yml']) @@ -247,11 +240,3 @@ def test_mainnet_settings_migration(): def test_testnet_settings_migration(): assert TESTNET_SETTINGS == HathorSettings.from_yaml(filepath=TESTNET_SETTINGS_FILEPATH) - - -def 
test_unittests_settings_migration(): - assert UNITTESTS_SETTINGS == HathorSettings.from_yaml(filepath=UNITTESTS_SETTINGS_FILEPATH) - - -def test_nano_testnet_settings_migration(): - assert NANO_TESTNET_SETTINGS == HathorSettings.from_yaml(filepath=NANO_TESTNET_SETTINGS_FILEPATH) diff --git a/tests/others/test_init_manager.py b/tests/others/test_init_manager.py index 7d60ed8fb..71b844abf 100644 --- a/tests/others/test_init_manager.py +++ b/tests/others/test_init_manager.py @@ -1,18 +1,29 @@ from typing import Iterator +from hathor.conf.settings import HathorSettings from hathor.pubsub import PubSubManager from hathor.simulator.utils import add_new_block, add_new_blocks +from hathor.storage import RocksDBStorage from hathor.transaction import BaseTransaction -from hathor.transaction.storage import TransactionMemoryStorage +from hathor.transaction.storage import TransactionRocksDBStorage +from hathor.transaction.vertex_parser import VertexParser from tests import unittest from tests.unittest import TestBuilder from tests.utils import add_blocks_unlock_reward, add_new_double_spending, add_new_transactions -class ModifiedTransactionMemoryStorage(TransactionMemoryStorage): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self._first_tx = None +class ModifiedTransactionRocksDBStorage(TransactionRocksDBStorage): + def __init__(self, path: str, settings: HathorSettings): + from hathor.nanocontracts.storage import NCRocksDBStorageFactory + rocksdb_storage = RocksDBStorage(path=path) + nc_storage_factory = NCRocksDBStorageFactory(rocksdb_storage) + super().__init__( + rocksdb_storage=rocksdb_storage, + settings=settings, + vertex_parser=VertexParser(settings=settings), + nc_storage_factory=nc_storage_factory, + ) + self._first_tx: BaseTransaction | None = None def set_first_tx(self, tx: BaseTransaction) -> None: self._first_tx = tx @@ -30,7 +41,8 @@ def _get_all_transactions(self) -> Iterator[BaseTransaction]: class 
SimpleManagerInitializationTestCase(unittest.TestCase): def setUp(self): super().setUp() - self.tx_storage = ModifiedTransactionMemoryStorage(settings=self._settings) + self.path = self.mkdtemp() + self.tx_storage = ModifiedTransactionRocksDBStorage(path=self.path, settings=self._settings) self.pubsub = PubSubManager(self.clock) def test_invalid_arguments(self): @@ -89,7 +101,8 @@ def test_wrong_stop(self): class ManagerInitializationTestCase(unittest.TestCase): def setUp(self): super().setUp() - self.tx_storage = ModifiedTransactionMemoryStorage(settings=self._settings) + self.path = self.mkdtemp() + self.tx_storage = ModifiedTransactionRocksDBStorage(path=self.path, settings=self._settings) self.network = 'testnet' self.manager = self.create_peer(self.network, tx_storage=self.tx_storage) @@ -128,8 +141,14 @@ def test_init_good_order(self): self.assertEqual(seen, self.all_hashes) # a new manager must be successfully initialized - self.tx_storage.reset_indexes() - self.create_peer('testnet', tx_storage=self.tx_storage) + self.manager.stop() + self.tx_storage._rocksdb_storage.close() + new_storage = ModifiedTransactionRocksDBStorage(path=self.path, settings=self._settings) + artifacts = self.get_builder().set_tx_storage(new_storage).build() + artifacts.manager.start() + self.clock.run() + self.clock.advance(5) + assert set(tx.hash for tx in artifacts.manager.tx_storage.get_all_transactions()) == self.all_hashes def test_init_unfavorable_order(self): """We force the first element of `get_all_transactions` to be a transaction @@ -147,23 +166,34 @@ def test_init_unfavorable_order(self): self.assertEqual(seen, self.all_hashes) # a new manager must be successfully initialized - self.tx_storage.reset_indexes() - self.create_peer('testnet', tx_storage=self.tx_storage) + self.manager.stop() + self.tx_storage._rocksdb_storage.close() + new_storage = ModifiedTransactionRocksDBStorage(path=self.path, settings=self._settings) + artifacts = 
self.get_builder().set_tx_storage(new_storage).build() + artifacts.manager.start() + self.clock.run() + self.clock.advance(5) + assert set(tx.hash for tx in artifacts.manager.tx_storage.get_all_transactions()) == self.all_hashes def test_init_not_voided_tips(self): # add a bunch of blocks and transactions for i in range(30): - add_new_block(self.manager, advance_clock=15) - add_new_transactions(self.manager, 5, advance_clock=15) + blk = add_new_block(self.manager, advance_clock=15) + txs = add_new_transactions(self.manager, 5, advance_clock=15) + self.all_hashes.add(blk.hash) + self.all_hashes.update(x.hash for x in txs) # add a bunch of conflicting transactions, these will all become voided for i in range(50): - add_new_double_spending(self.manager) + tx = add_new_double_spending(self.manager) + self.all_hashes.add(tx.hash) # finish up with another bunch of blocks and transactions for i in range(30): - add_new_block(self.manager, advance_clock=15) - add_new_transactions(self.manager, 5, advance_clock=15) + blk = add_new_block(self.manager, advance_clock=15) + txs = add_new_transactions(self.manager, 5, advance_clock=15) + self.all_hashes.add(blk.hash) + self.all_hashes.update(x.hash for x in txs) # not the point of this test, but just a sanity check self.assertConsensusValid(self.manager) @@ -172,9 +202,15 @@ def test_init_not_voided_tips(self): self.assertEqual(50, sum(bool(tx.get_metadata().voided_by) for tx in self.tx_storage.get_all_transactions())) # create a new manager (which will initialize in the self.create_peer call) - self.tx_storage.reset_indexes() self.manager.stop() - manager = self.create_peer(self.network, tx_storage=self.tx_storage, full_verification=False) + self.tx_storage._rocksdb_storage.close() + new_storage = ModifiedTransactionRocksDBStorage(path=self.path, settings=self._settings) + artifacts = self.get_builder().set_tx_storage(new_storage).build() + manager = artifacts.manager + manager.start() + self.clock.run() + self.clock.advance(5) 
+ assert set(tx.hash for tx in manager.tx_storage.get_all_transactions()) == self.all_hashes # make sure none of its tx tips are voided all_tips = manager.generate_parent_txs(None).get_all_tips() diff --git a/tests/others/test_metrics.py b/tests/others/test_metrics.py index 6b4c85cf3..e4c6decd2 100644 --- a/tests/others/test_metrics.py +++ b/tests/others/test_metrics.py @@ -1,13 +1,15 @@ import tempfile from unittest.mock import Mock +from hathor.manager import HathorManager from hathor.p2p.manager import PeerConnectionsMetrics from hathor.p2p.peer import PrivatePeer from hathor.p2p.peer_endpoint import PeerEndpoint from hathor.p2p.protocol import HathorProtocol from hathor.pubsub import HathorEvents from hathor.simulator.utils import add_new_blocks -from hathor.transaction.storage import TransactionCacheStorage, TransactionMemoryStorage +from hathor.transaction.storage import TransactionCacheStorage, TransactionRocksDBStorage +from hathor.transaction.vertex_parser import VertexParser from hathor.wallet import Wallet from tests import unittest @@ -21,9 +23,7 @@ def test_p2p_network_events(self): the event to set its own fields related to the network peers """ # Preparation - self.use_memory_storage = True manager = self.create_peer('testnet') - self.assertIsInstance(manager.tx_storage, TransactionMemoryStorage) pubsub = manager.pubsub # Execution @@ -49,7 +49,7 @@ def test_connections_manager_integration(self): to update the Metrics class with info from ConnectionsManager class """ # Preparation - tx_storage = TransactionMemoryStorage(settings=self._settings) + tx_storage = self.create_tx_storage() tmpdir = tempfile.mkdtemp() self.tmpdirs.append(tmpdir) wallet = Wallet(directory=tmpdir) @@ -86,14 +86,12 @@ def test_tx_storage_data_collection_with_rocksdb_storage_and_no_cache(self): The expected result is that it will successfully collect the RocksDB metrics. 
""" - path = tempfile.mkdtemp() - self.tmpdirs.append(path) - - def _init_manager(): + def _init_manager(path: tempfile.TemporaryDirectory | None = None) -> HathorManager: builder = self.get_builder() \ - .use_rocksdb(path, cache_capacity=100) \ - .force_memory_index() \ + .set_rocksdb_cache_capacity(100) \ .set_wallet(self._create_test_wallet(unlocked=True)) + if path: + builder.set_rocksdb_path(path) manager = self.create_peer_from_builder(builder, start_manager=False) return manager @@ -110,6 +108,15 @@ def _init_manager(): b'event': 0.0, b'event-metadata': 0.0, b'feature-activation-metadata': 0.0, + b'info-index': 0.0, + b'height-index': 0.0, + b'tips-all': 0.0, + b'tips-blocks': 0.0, + b'tips-txs': 0.0, + b'timestamp-sorted-all': 0.0, + b'timestamp-sorted-blocks': 0.0, + b'timestamp-sorted-txs': 0.0, + b'nc-state': 0.0, }) manager.tx_storage.pre_init() @@ -122,7 +129,7 @@ def _init_manager(): # https://github.com/facebook/rocksdb/blob/v7.5.3/include/rocksdb/db.h#L1396 manager.tx_storage._db.close() - manager = _init_manager() + manager = _init_manager(manager.tx_storage._rocksdb_storage.temp_dir) manager.metrics._collect_data() # We don't know exactly the sizes of each column family, @@ -137,15 +144,13 @@ def test_tx_storage_data_collection_with_rocksdb_storage_and_cache(self): The expected result is that it will successfully collect the RocksDB metrics. 
""" - path = tempfile.mkdtemp() - self.tmpdirs.append(path) - - def _init_manager(): + def _init_manager(path: tempfile.TemporaryDirectory | None = None) -> HathorManager: builder = self.get_builder() \ - .use_rocksdb(path, cache_capacity=100) \ - .force_memory_index() \ + .set_rocksdb_cache_capacity(100) \ .set_wallet(self._create_test_wallet(unlocked=True)) \ .use_tx_storage_cache() + if path: + builder.set_rocksdb_path(path) manager = self.create_peer_from_builder(builder, start_manager=False) return manager @@ -163,6 +168,15 @@ def _init_manager(): b'event': 0.0, b'event-metadata': 0.0, b'feature-activation-metadata': 0.0, + b'info-index': 0.0, + b'height-index': 0.0, + b'tips-all': 0.0, + b'tips-blocks': 0.0, + b'tips-txs': 0.0, + b'timestamp-sorted-all': 0.0, + b'timestamp-sorted-blocks': 0.0, + b'timestamp-sorted-txs': 0.0, + b'nc-state': 0.0, }) manager.tx_storage.pre_init() @@ -176,7 +190,7 @@ def _init_manager(): # https://github.com/facebook/rocksdb/blob/v7.5.3/include/rocksdb/db.h#L1396 manager.tx_storage.store._db.close() - manager = _init_manager() + manager = _init_manager(manager.tx_storage.store._rocksdb_storage.temp_dir) manager.metrics._collect_data() # We don't know exactly the sizes of each column family, @@ -184,30 +198,12 @@ def _init_manager(): self.assertTrue(manager.metrics.rocksdb_cfs_sizes[b'tx'] > 500) self.assertTrue(manager.metrics.rocksdb_cfs_sizes[b'meta'] > 1000) - def test_tx_storage_data_collection_with_memory_storage(self): - """Tests storage data collection when using Memory Storage using no cache - We don't allow using it with cache, so this is the only case - - The expected result is that nothing is done, because we currently only collect - data for RocksDB storage - """ - tx_storage = TransactionMemoryStorage(settings=self._settings) - - # All - manager = self.create_peer('testnet', tx_storage=tx_storage) - - manager.metrics._collect_data() - - self.assertEqual(manager.metrics.rocksdb_cfs_sizes, {}) - def 
test_peer_connections_data_collection(self): """Test if peer connections data is correctly being collected from the ConnectionsManager """ # Preparation - self.use_memory_storage = True manager = self.create_peer('testnet') - self.assertIsInstance(manager.tx_storage, TransactionMemoryStorage) my_peer = manager.my_peer @@ -259,9 +255,24 @@ def test_cache_data_collection(self): """Test if cache-related data is correctly being collected from the TransactionCacheStorage """ + from hathor.nanocontracts.storage import NCRocksDBStorageFactory + # Preparation - base_storage = TransactionMemoryStorage(settings=self._settings) - tx_storage = TransactionCacheStorage(base_storage, self.clock, indexes=None, settings=self._settings) + rocksdb_storage = self.create_rocksdb_storage() + nc_storage_factory = NCRocksDBStorageFactory(rocksdb_storage) + base_storage = TransactionRocksDBStorage( + rocksdb_storage=rocksdb_storage, + settings=self._settings, + vertex_parser=VertexParser(settings=self._settings), + nc_storage_factory=nc_storage_factory, + ) + tx_storage = TransactionCacheStorage( + base_storage, + self.clock, + indexes=None, + settings=self._settings, + nc_storage_factory=nc_storage_factory, + ) manager = self.create_peer('testnet', tx_storage=tx_storage) diff --git a/tests/p2p/test_double_spending.py b/tests/p2p/test_double_spending.py index 1cfedead8..abfa4e202 100644 --- a/tests/p2p/test_double_spending.py +++ b/tests/p2p/test_double_spending.py @@ -63,14 +63,14 @@ def test_simple_double_spending(self) -> None: self.assertNotEqual(tx1.hash, tx3.hash) self.assertNotEqual(tx2.hash, tx3.hash) - self.assertTrue(self.manager1.propagate_tx(tx1, False)) + self.assertTrue(self.manager1.propagate_tx(tx1)) self.run_to_completion() meta1 = tx1.get_metadata() self.assertEqual(meta1.conflict_with, None) self.assertEqual(meta1.voided_by, None) # Propagate a conflicting transaction. 
- self.assertTrue(self.manager1.propagate_tx(tx2, False)) + self.assertTrue(self.manager1.propagate_tx(tx2)) self.run_to_completion() meta1 = tx1.get_metadata(force_reload=True) @@ -225,7 +225,7 @@ def test_double_spending_propagation(self) -> None: # --- self.clock.advance(15) - self.assertTrue(self.manager1.propagate_tx(tx4, False)) + self.assertTrue(self.manager1.propagate_tx(tx4)) self.clock.advance(15) self.run_to_completion() @@ -287,7 +287,7 @@ def test_double_spending_propagation(self) -> None: tx7.timestamp = int(self.clock.seconds()) self.manager1.cpu_mining_service.resolve(tx7) self.clock.advance(15) - self.manager1.propagate_tx(tx7, False) + self.manager1.propagate_tx(tx7) self.clock.advance(15) meta1 = tx1.get_metadata(force_reload=True) diff --git a/tests/p2p/test_sync_v2.py b/tests/p2p/test_sync_v2.py index 579175ae2..777549fbf 100644 --- a/tests/p2p/test_sync_v2.py +++ b/tests/p2p/test_sync_v2.py @@ -41,7 +41,7 @@ def _get_partial_blocks(self, tx_storage: TransactionStorage) -> set[VertexId]: partial_blocks.add(tx.hash) return partial_blocks - def _run_restart_test(self, *, full_verification: bool, use_tx_storage_cache: bool) -> None: + def _run_restart_test(self, *, use_tx_storage_cache: bool) -> None: manager1 = self.create_peer() manager1.allow_mining_without_peers() @@ -63,11 +63,9 @@ def _run_restart_test(self, *, full_verification: bool, use_tx_storage_cache: bo gen_tx1.stop() # Create a new peer and run sync for a while (but stop before getting synced). 
- path = self.mkdtemp() peer = PrivatePeer.auto_generated() builder2 = self.simulator.get_default_builder() \ - .set_peer(peer) \ - .use_rocksdb(path) + .set_peer(peer) manager2 = self.simulator.create_peer(builder2) conn12 = FakeConnection(manager1, manager2, latency=0.05) @@ -92,6 +90,7 @@ def _run_restart_test(self, *, full_verification: bool, use_tx_storage_cache: bo self.simulator.remove_connection(conn12) manager2.stop() assert isinstance(manager2.tx_storage, TransactionRocksDBStorage) + temp_dir = not_none(manager2.tx_storage._rocksdb_storage.temp_dir) manager2.tx_storage._rocksdb_storage.close() del manager2 @@ -104,12 +103,7 @@ def _run_restart_test(self, *, full_verification: bool, use_tx_storage_cache: bo # Restart full node using the same db. builder3 = self.simulator.get_default_builder() \ .set_peer(peer) \ - .use_rocksdb(path) - - if full_verification: - builder3.enable_full_verification() - else: - builder3.disable_full_verification() + .set_rocksdb_path(temp_dir) if use_tx_storage_cache: builder3.use_tx_storage_cache() @@ -146,17 +140,11 @@ def _run_restart_test(self, *, full_verification: bool, use_tx_storage_cache: bo self.assertEqual(manager1.tx_storage.get_vertices_count(), manager3.tx_storage.get_vertices_count()) self.assertConsensusEqualSyncV2(manager1, manager3) - def test_restart_fullnode_full_verification(self) -> None: - self._run_restart_test(full_verification=True, use_tx_storage_cache=False) - def test_restart_fullnode_quick(self) -> None: - self._run_restart_test(full_verification=False, use_tx_storage_cache=False) + self._run_restart_test(use_tx_storage_cache=False) def test_restart_fullnode_quick_with_cache(self) -> None: - self._run_restart_test(full_verification=False, use_tx_storage_cache=True) - - def test_restart_fullnode_full_verification_with_cache(self) -> None: - self._run_restart_test(full_verification=True, use_tx_storage_cache=True) + self._run_restart_test(use_tx_storage_cache=True) def 
test_exceeds_streaming_and_mempool_limits(self) -> None: manager1 = self.create_peer() diff --git a/tests/poa/test_poa_simulation.py b/tests/poa/test_poa_simulation.py index 60893457a..73776f32a 100644 --- a/tests/poa/test_poa_simulation.py +++ b/tests/poa/test_poa_simulation.py @@ -75,7 +75,7 @@ def _assert_height_weight_signer_id( class PoaSimulationTest(SimulatorTestCase): def _get_manager(self, signer: PoaSigner | None = None) -> HathorManager: - builder = self.simulator.get_default_builder().disable_full_verification() + builder = self.simulator.get_default_builder() if signer: builder.set_poa_signer(signer) artifacts = self.simulator.create_artifacts(builder) @@ -324,19 +324,16 @@ def test_producer_leave_and_comeback(self) -> None: ) def test_existing_storage(self) -> None: - import tempfile - rocksdb_directory = tempfile.mkdtemp() - self.tmpdirs.append(rocksdb_directory) signer = get_signer() signer_id = signer._signer_id self.simulator.settings = get_settings(signer, time_between_blocks=10) builder = self.simulator.get_default_builder() \ .set_poa_signer(signer) \ - .use_rocksdb(path=rocksdb_directory) artifacts1 = self.simulator.create_artifacts(builder) manager1 = artifacts1.manager + rocksdb_dir = not_none(artifacts1.rocksdb_storage.temp_dir) manager1.allow_mining_without_peers() self.simulator.run(50) @@ -357,7 +354,7 @@ def test_existing_storage(self) -> None: builder = self.simulator.get_default_builder() \ .set_poa_signer(signer) \ - .use_rocksdb(path=rocksdb_directory) + .set_rocksdb_path(path=rocksdb_dir) artifacts = self.simulator.create_artifacts(builder) manager2 = artifacts.manager @@ -419,8 +416,7 @@ def test_new_signer_added(self) -> None: builder_1b = self.simulator.get_default_builder() \ .set_tx_storage(storage_1a) \ - .set_poa_signer(signer1) \ - .disable_full_verification() + .set_poa_signer(signer1) artifacts_1b = self.simulator.create_artifacts(builder_1b) manager_1b = artifacts_1b.manager manager_1b.allow_mining_without_peers() @@ 
-569,4 +565,4 @@ def test_use_case(self) -> None: token_tx.inputs[0].data = P2PKH.create_input_data(public_key_bytes, signature) token_tx.update_hash() - assert manager.on_new_tx(token_tx, fails_silently=False) + assert manager.on_new_tx(token_tx) diff --git a/tests/poa/test_poa_verification.py b/tests/poa/test_poa_verification.py index 5ff4e1def..c87efebe7 100644 --- a/tests/poa/test_poa_verification.py +++ b/tests/poa/test_poa_verification.py @@ -68,22 +68,22 @@ def _get_valid_poa_block(self) -> PoaBlock: def test_poa_block_verify_basic(self) -> None: block = self._get_valid_poa_block() - verify_version_wrapped = Mock(wraps=self.verifiers.vertex.verify_version) + verify_version_basic_wrapped = Mock(wraps=self.verifiers.vertex.verify_version_basic) verify_weight_wrapped = Mock(wraps=self.verifiers.block.verify_weight) verify_reward_wrapped = Mock(wraps=self.verifiers.block.verify_reward) verify_poa_wrapped = Mock(wraps=self.verifiers.poa_block.verify_poa) with ( - patch.object(VertexVerifier, 'verify_version', verify_version_wrapped), + patch.object(VertexVerifier, 'verify_version_basic', verify_version_basic_wrapped), patch.object(BlockVerifier, 'verify_weight', verify_weight_wrapped), patch.object(BlockVerifier, 'verify_reward', verify_reward_wrapped), patch.object(PoaBlockVerifier, 'verify_poa', verify_poa_wrapped), ): - self.manager.verification_service.verify_basic(block) + self.manager.verification_service.verify_basic(block, self.verification_params) # Vertex methods - verify_version_wrapped.assert_called_once() + verify_version_basic_wrapped.assert_called_once() # Block methods verify_weight_wrapped.assert_not_called() @@ -111,7 +111,7 @@ def test_poa_block_verify_without_storage(self) -> None: patch.object(BlockVerifier, 'verify_data', verify_data_wrapped), patch.object(VertexVerifier, 'verify_sigops_output', verify_sigops_output_wrapped), ): - self.manager.verification_service.verify_without_storage(block) + 
self.manager.verification_service.verify_without_storage(block, self.verification_params) # Vertex methods verify_outputs_wrapped.assert_called_once() @@ -128,6 +128,7 @@ def test_poa_block_verify(self) -> None: block = self._get_valid_poa_block() verify_outputs_wrapped = Mock(wraps=self.verifiers.vertex.verify_outputs) + verify_headers_wrapped = Mock(wraps=self.verifiers.vertex.verify_headers) verify_pow_wrapped = Mock(wraps=self.verifiers.vertex.verify_pow) verify_no_inputs_wrapped = Mock(wraps=self.verifiers.block.verify_no_inputs) @@ -141,6 +142,7 @@ def test_poa_block_verify(self) -> None: with ( patch.object(VertexVerifier, 'verify_outputs', verify_outputs_wrapped), + patch.object(VertexVerifier, 'verify_headers', verify_headers_wrapped), patch.object(VertexVerifier, 'verify_pow', verify_pow_wrapped), patch.object(BlockVerifier, 'verify_no_inputs', verify_no_inputs_wrapped), patch.object(BlockVerifier, 'verify_output_token_indexes', verify_output_token_indexes_wrapped), @@ -151,10 +153,11 @@ def test_poa_block_verify(self) -> None: patch.object(BlockVerifier, 'verify_height', verify_height_wrapped), patch.object(BlockVerifier, 'verify_mandatory_signaling', verify_mandatory_signaling_wrapped), ): - self.manager.verification_service.verify(block) + self.manager.verification_service.verify(block, self.verification_params) # Vertex methods verify_outputs_wrapped.assert_called_once() + verify_headers_wrapped.assert_called_once() # Block methods verify_pow_wrapped.assert_not_called() @@ -170,22 +173,22 @@ def test_poa_block_verify(self) -> None: def test_poa_block_validate_basic(self) -> None: block = self._get_valid_poa_block() - verify_version_wrapped = Mock(wraps=self.verifiers.vertex.verify_version) + verify_version_basic_wrapped = Mock(wraps=self.verifiers.vertex.verify_version_basic) verify_weight_wrapped = Mock(wraps=self.verifiers.block.verify_weight) verify_reward_wrapped = Mock(wraps=self.verifiers.block.verify_reward) verify_poa_wrapped = 
Mock(wraps=self.verifiers.poa_block.verify_poa) with ( - patch.object(VertexVerifier, 'verify_version', verify_version_wrapped), + patch.object(VertexVerifier, 'verify_version_basic', verify_version_basic_wrapped), patch.object(BlockVerifier, 'verify_weight', verify_weight_wrapped), patch.object(BlockVerifier, 'verify_reward', verify_reward_wrapped), patch.object(PoaBlockVerifier, 'verify_poa', verify_poa_wrapped), ): - self.manager.verification_service.validate_basic(block) + self.manager.verification_service.validate_basic(block, self.verification_params) # Vertex methods - verify_version_wrapped.assert_called_once() + verify_version_basic_wrapped.assert_called_once() # Block methods verify_weight_wrapped.assert_not_called() @@ -196,7 +199,7 @@ def test_poa_block_validate_basic(self) -> None: self.assertEqual(block.get_metadata().validation, ValidationState.BASIC) # full validation should still pass and the validation updated to FULL - self.manager.verification_service.validate_full(block) + self.manager.verification_service.validate_full(block, self.verification_params) self.assertEqual(block.get_metadata().validation, ValidationState.FULL) # and if running basic validation again it shouldn't validate or change the validation state @@ -207,7 +210,7 @@ def test_poa_block_validate_basic(self) -> None: patch.object(BlockVerifier, 'verify_weight', verify_weight_wrapped2), patch.object(BlockVerifier, 'verify_reward', verify_reward_wrapped2), ): - self.manager.verification_service.validate_basic(block) + self.manager.verification_service.validate_basic(block, self.verification_params) # Block methods verify_weight_wrapped2.assert_not_called() @@ -219,8 +222,9 @@ def test_poa_block_validate_basic(self) -> None: def test_poa_block_validate_full(self) -> None: block = self._get_valid_poa_block() - verify_version_wrapped = Mock(wraps=self.verifiers.vertex.verify_version) + verify_version_basic_wrapped = Mock(wraps=self.verifiers.vertex.verify_version_basic) 
verify_outputs_wrapped = Mock(wraps=self.verifiers.vertex.verify_outputs) + verify_headers_wrapped = Mock(wraps=self.verifiers.vertex.verify_headers) verify_pow_wrapped = Mock(wraps=self.verifiers.vertex.verify_pow) verify_no_inputs_wrapped = Mock(wraps=self.verifiers.block.verify_no_inputs) @@ -236,8 +240,9 @@ def test_poa_block_validate_full(self) -> None: verify_poa_wrapped = Mock(wraps=self.verifiers.poa_block.verify_poa) with ( - patch.object(VertexVerifier, 'verify_version', verify_version_wrapped), + patch.object(VertexVerifier, 'verify_version_basic', verify_version_basic_wrapped), patch.object(VertexVerifier, 'verify_outputs', verify_outputs_wrapped), + patch.object(VertexVerifier, 'verify_headers', verify_headers_wrapped), patch.object(VertexVerifier, 'verify_pow', verify_pow_wrapped), patch.object(BlockVerifier, 'verify_no_inputs', verify_no_inputs_wrapped), patch.object(BlockVerifier, 'verify_output_token_indexes', verify_output_token_indexes_wrapped), @@ -251,11 +256,12 @@ def test_poa_block_validate_full(self) -> None: patch.object(BlockVerifier, 'verify_mandatory_signaling', verify_mandatory_signaling_wrapped), patch.object(PoaBlockVerifier, 'verify_poa', verify_poa_wrapped), ): - self.manager.verification_service.validate_full(block) + self.manager.verification_service.validate_full(block, self.verification_params) # Vertex methods - verify_version_wrapped.assert_called_once() + verify_version_basic_wrapped.assert_called_once() verify_outputs_wrapped.assert_called_once() + verify_headers_wrapped.assert_called_once() # Block methods verify_pow_wrapped.assert_not_called() diff --git a/tests/resources/event/test_event.py b/tests/resources/event/test_event.py index fbf32240e..c1e2cd0ec 100644 --- a/tests/resources/event/test_event.py +++ b/tests/resources/event/test_event.py @@ -18,14 +18,17 @@ from hathor.event import EventManager from hathor.event.resources.event import EventResource -from hathor.event.storage import EventMemoryStorage +from 
hathor.event.storage import EventRocksDBStorage +from hathor.storage import RocksDBStorage from tests.resources.base_resource import StubSite from tests.utils import EventMocker @pytest.fixture def web(): - event_storage = EventMemoryStorage() + event_storage = EventRocksDBStorage( + rocksdb_storage=RocksDBStorage.create_temp(), + ) for i in range(3): event = EventMocker.create_event(i) diff --git a/tests/resources/nanocontracts/__init__.py b/tests/resources/nanocontracts/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/resources/nanocontracts/base_resource.py b/tests/resources/nanocontracts/base_resource.py new file mode 100644 index 000000000..8921c4a73 --- /dev/null +++ b/tests/resources/nanocontracts/base_resource.py @@ -0,0 +1,40 @@ +from hathor.manager import HathorManager +from hathor.nanocontracts import Blueprint, OnChainBlueprint +from hathor.nanocontracts.catalog import NCBlueprintCatalog +from hathor.nanocontracts.types import BlueprintId +from tests.resources.base_resource import _BaseResourceTest + + +class GenericNanoResourceTest(_BaseResourceTest._ResourceTest): + __test__ = False + + def create_builtin_blueprint( + self, + manager: HathorManager, + blueprint_id: BlueprintId, + blueprint_class: type[Blueprint], + ) -> None: + manager.tx_storage.nc_catalog = NCBlueprintCatalog({ + blueprint_id: blueprint_class, + }) + + def create_on_chain_blueprint(self, manager: HathorManager, nc_code: str) -> OnChainBlueprint: + from hathor.nanocontracts.on_chain_blueprint import Code + from tests.nanocontracts.on_chain_blueprints.utils import get_ocb_private_key + code = Code.from_python_code(nc_code, self._settings) + timestamp = manager.tx_storage.latest_timestamp + 1 + parents = manager.get_new_tx_parents(timestamp) + blueprint = OnChainBlueprint( + weight=1, + inputs=[], + outputs=[], + parents=parents, + storage=manager.tx_storage, + timestamp=timestamp, + code=code, + ) + blueprint.weight = 
manager.daa.minimum_tx_weight(blueprint) + blueprint.sign(get_ocb_private_key()) + manager.cpu_mining_service.resolve(blueprint) + manager.reactor.advance(2) # type: ignore + return blueprint diff --git a/tests/resources/nanocontracts/dummy_blueprint.py b/tests/resources/nanocontracts/dummy_blueprint.py new file mode 100644 index 000000000..911673714 --- /dev/null +++ b/tests/resources/nanocontracts/dummy_blueprint.py @@ -0,0 +1,18 @@ +from hathor.nanocontracts import Blueprint +from hathor.nanocontracts.context import Context +from hathor.nanocontracts.types import public + + +class TestBlueprint(Blueprint): + """ This class is used by the test for the blueprint source code resource + It must be in a separate file for the assert in the test + """ + int_attribute: int + + @public + def initialize(self, ctx: Context) -> None: + self.int_attribute = 0 + + @public + def sum(self, ctx: Context, arg1: int) -> None: + self.int_attribute += arg1 diff --git a/tests/resources/nanocontracts/my_blueprint.py b/tests/resources/nanocontracts/my_blueprint.py new file mode 100644 index 000000000..59efb7578 --- /dev/null +++ b/tests/resources/nanocontracts/my_blueprint.py @@ -0,0 +1,58 @@ +from typing import Optional + +from hathor.nanocontracts import Blueprint +from hathor.nanocontracts.context import Context +from hathor.nanocontracts.types import Address, Amount, SignedData, Timestamp, TokenUid, TxOutputScript, public, view + + +class MyBlueprint(Blueprint): + a_int: int + a_str: str + a_bool: bool + a_address: Address + a_amount: Amount + a_timestamp: Timestamp + a_token_uid: TokenUid + a_script: TxOutputScript + a_signed_data: SignedData[str] + a_dict: dict[str, int] + a_tuple: tuple[str, int, bool] + a_dict_dict_tuple: dict[str, tuple[str, int]] + a_optional_int: Optional[int] + + @public + def initialize(self, ctx: Context) -> None: + pass + + @public + def nop(self, ctx: Context, arg1: int, arg2: SignedData[str]) -> None: + """No operation.""" + self.a = arg1 + + @view + 
def my_private_method_nop(self, arg1: int) -> int: + return 1 + + @view + def my_private_method_2(self) -> dict[dict[str, int], tuple[bool, str, int, int]]: + return {} + + @view + def my_private_method_3(self) -> list[str]: + return [] + + @view + def my_private_method_4(self) -> set[int]: + return set() + + @view + def my_private_method_5(self) -> str | None: + return None + + @view + def my_private_method_6(self) -> None | str: + return None + + @view + def my_private_method_7(self) -> str | int | bool | None: + return 0 diff --git a/tests/resources/nanocontracts/test_blueprint.py b/tests/resources/nanocontracts/test_blueprint.py new file mode 100644 index 000000000..aa2e34790 --- /dev/null +++ b/tests/resources/nanocontracts/test_blueprint.py @@ -0,0 +1,156 @@ +from collections.abc import Generator +from typing import Any + +from twisted.internet.defer import Deferred, inlineCallbacks + +from hathor.nanocontracts.resources.blueprint import BlueprintInfoResource +from hathor.nanocontracts.types import BlueprintId, VertexId +from hathor.nanocontracts.utils import load_builtin_blueprint_for_ocb +from hathor.simulator.utils import add_new_blocks +from tests.resources.base_resource import StubSite +from tests.resources.nanocontracts.base_resource import GenericNanoResourceTest + + +class BaseBlueprintInfoTest(GenericNanoResourceTest): + # this is what subclasses have to define + blueprint_id: BlueprintId + + def setUp(self): + super().setUp() + self.manager = self.create_peer('unittests') + self.web = StubSite(BlueprintInfoResource(self.manager)) + + @inlineCallbacks + def test_fail_missing_id(self) -> Generator[Deferred[Any], Any, None]: + response1 = yield self.web.get('blueprint') + self.assertEqual(400, response1.responseCode) + + @inlineCallbacks + def test_fail_invalid_id(self) -> Generator[Deferred[Any], Any, None]: + response1 = yield self.web.get( + 'blueprint', + { + b'blueprint_id': b'xxx', + } + ) + self.assertEqual(400, response1.responseCode) + + 
@inlineCallbacks + def test_fail_unknown_id(self) -> Generator[Deferred[Any], Any, None]: + response1 = yield self.web.get( + 'blueprint', + { + b'blueprint_id': b'0' * 32, + } + ) + self.assertEqual(404, response1.responseCode) + + @inlineCallbacks + def test_success(self) -> Generator[Deferred[Any], Any, None]: + response1 = yield self.web.get( + 'blueprint', + { + b'blueprint_id': bytes(self.blueprint_id.hex(), 'utf-8'), + } + ) + data = response1.json_value() + + self.assertEqual(data['id'], self.blueprint_id.hex()) + self.assertEqual(data['name'], 'MyBlueprint') + self.assertEqual(data['attributes'], { + 'a_int': 'int', + 'a_str': 'str', + 'a_bool': 'bool', + 'a_address': 'Address', + 'a_amount': 'Amount', + 'a_timestamp': 'Timestamp', + 'a_token_uid': 'TokenUid', + 'a_script': 'TxOutputScript', + 'a_signed_data': 'SignedData[str]', + 'a_dict': 'dict[str, int]', + 'a_tuple': 'tuple[str, int, bool]', + 'a_dict_dict_tuple': 'dict[str, tuple[str, int]]', + 'a_optional_int': 'int?', + }) + self.assertEqual(data['public_methods'], { + 'initialize': { + 'args': [], + 'return_type': 'null', + 'docstring': None, + }, + 'nop': { + 'args': [{ + 'name': 'arg1', + 'type': 'int' + }, { + 'name': 'arg2', + 'type': 'SignedData[str]', + }], + 'return_type': 'null', + 'docstring': 'No operation.', + }, + }) + expected_data = { + 'my_private_method_nop': { + 'args': [{ + 'name': 'arg1', + 'type': 'int', + }], + 'return_type': 'int', + 'docstring': None, + }, + 'my_private_method_2': { + 'args': [], + 'return_type': 'dict[dict[str, int], tuple[bool, str, int, int]]', + 'docstring': None, + }, + 'my_private_method_3': { + 'args': [], + 'return_type': 'list[str]', + 'docstring': None, + }, + 'my_private_method_4': { + 'args': [], + 'return_type': 'set[int]', + 'docstring': None, + }, + 'my_private_method_5': { + 'args': [], + 'return_type': 'str?', + 'docstring': None, + }, + 'my_private_method_6': { + 'args': [], + 'return_type': 'str?', + 'docstring': None, + }, + 
'my_private_method_7': { + 'args': [], + 'return_type': 'union[str, int, bool, null]', + 'docstring': None, + }, + } + self.assertEqual(data['private_methods'], expected_data) + + +class BuiltinBlueprintInfoTest(BaseBlueprintInfoTest): + __test__ = True + + def setUp(self): + super().setUp() + from tests.resources.nanocontracts import my_blueprint + self.blueprint_id = BlueprintId(VertexId(b'3cb032600bdf7db784800e4ea911b10676fa2f67591f82bb62628c234e771595')) + self.create_builtin_blueprint(self.manager, self.blueprint_id, my_blueprint.MyBlueprint) + + +class OCBBlueprintInfoTest(BaseBlueprintInfoTest): + __test__ = True + + def setUp(self): + super().setUp() + from tests.resources import nanocontracts + nc_code = load_builtin_blueprint_for_ocb('my_blueprint.py', 'MyBlueprint', nanocontracts) + blueprint = self.create_on_chain_blueprint(self.manager, nc_code) + self.manager.vertex_handler.on_new_relayed_vertex(blueprint) + add_new_blocks(self.manager, 1, advance_clock=30) # confirm the on-chain blueprint vertex + self.blueprint_id = BlueprintId(VertexId(blueprint.hash)) diff --git a/tests/resources/nanocontracts/test_blueprint_source_code.py b/tests/resources/nanocontracts/test_blueprint_source_code.py new file mode 100644 index 000000000..8bd1fe6b2 --- /dev/null +++ b/tests/resources/nanocontracts/test_blueprint_source_code.py @@ -0,0 +1,121 @@ +from twisted.internet.defer import inlineCallbacks + +from hathor.nanocontracts.resources import BlueprintSourceCodeResource +from hathor.nanocontracts.types import BlueprintId +from hathor.nanocontracts.utils import load_builtin_blueprint_for_ocb +from hathor.simulator.utils import add_new_blocks +from tests.resources.base_resource import StubSite +from tests.resources.nanocontracts.base_resource import GenericNanoResourceTest + + +class BaseBlueprintSourceCodeTest(GenericNanoResourceTest): + __test__ = False + + # this is what subclasses have to define + blueprint_id: BlueprintId + blueprint_source: str + + def 
setUp(self): + super().setUp() + self.manager = self.create_peer('unittests') + self.web = StubSite(BlueprintSourceCodeResource(self.manager)) + + @inlineCallbacks + def test_fail_missing_id(self): + response1 = yield self.web.get('blueprint/source') + self.assertEqual(400, response1.responseCode) + + @inlineCallbacks + def test_fail_invalid_id(self): + response1 = yield self.web.get( + 'blueprint/source', + { + b'blueprint_id': b'xxx', + } + ) + self.assertEqual(400, response1.responseCode) + + @inlineCallbacks + def test_fail_unknown_id(self): + response1 = yield self.web.get( + 'blueprint/source', + { + b'blueprint_id': b'0' * 32, + } + ) + self.assertEqual(404, response1.responseCode) + + @inlineCallbacks + def test_success(self): + response1 = yield self.web.get( + 'blueprint/source', + { + b'blueprint_id': bytes(self.blueprint_id.hex(), 'utf-8'), + } + ) + data = response1.json_value() + self.assertEqual(self.blueprint_source, data['source_code']) + + +class BuiltinBlueprintSourceCodeTest(BaseBlueprintSourceCodeTest): + __test__ = True + + blueprint_source = r'''from hathor.nanocontracts import Blueprint +from hathor.nanocontracts.context import Context +from hathor.nanocontracts.types import public + + +class TestBlueprint(Blueprint): + """ This class is used by the test for the blueprint source code resource + It must be in a separate file for the assert in the test + """ + int_attribute: int + + @public + def initialize(self, ctx: Context) -> None: + self.int_attribute = 0 + + @public + def sum(self, ctx: Context, arg1: int) -> None: + self.int_attribute += arg1 +''' + + def setUp(self): + super().setUp() + from tests.resources.nanocontracts import dummy_blueprint + self.blueprint_id = BlueprintId(b'3cb032600bdf7db784800e4ea911b10676fa2f67591f82bb62628c234e771595') + self.create_builtin_blueprint(self.manager, self.blueprint_id, dummy_blueprint.TestBlueprint) + + +class OCBBlueprintSourceCodeTest(BaseBlueprintSourceCodeTest): + __test__ = True + + 
blueprint_source = r'''from hathor.nanocontracts import Blueprint +from hathor.nanocontracts.context import Context +from hathor.nanocontracts.types import public + + +class TestBlueprint(Blueprint): + """ This class is used by the test for the blueprint source code resource + It must be in a separate file for the assert in the test + """ + int_attribute: int + + @public + def initialize(self, ctx: Context) -> None: + self.int_attribute = 0 + + @public + def sum(self, ctx: Context, arg1: int) -> None: + self.int_attribute += arg1 +__blueprint__ = TestBlueprint +''' + + def setUp(self): + super().setUp() + from tests.resources import nanocontracts + nc_code = load_builtin_blueprint_for_ocb('dummy_blueprint.py', 'TestBlueprint', nanocontracts) + blueprint = self.create_on_chain_blueprint(self.manager, nc_code) + self.manager.vertex_handler.on_new_relayed_vertex(blueprint) + add_new_blocks(self.manager, 1, advance_clock=30) # confirm the on-chain blueprint vertex + self.blueprint_id = BlueprintId(blueprint.hash) diff --git a/tests/resources/nanocontracts/test_builtin.py b/tests/resources/nanocontracts/test_builtin.py new file mode 100644 index 000000000..4f1a7b22c --- /dev/null +++ b/tests/resources/nanocontracts/test_builtin.py @@ -0,0 +1,277 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from hathor.nanocontracts import Blueprint, Context, public +from hathor.nanocontracts.catalog import NCBlueprintCatalog +from hathor.nanocontracts.resources.builtin import BlueprintBuiltinResource +from tests.resources.base_resource import StubSite, _BaseResourceTest + + +class MyBlueprint1(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + +class MyBlueprint2(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + +class BlueprintBuiltinResourceTest(_BaseResourceTest._ResourceTest): + + def setUp(self): + super().setUp() + self.manager = self.create_peer( + 'unittests', + nc_indexes=True, + ) + self.web = StubSite(BlueprintBuiltinResource(self.manager)) + + self.manager.tx_storage.nc_catalog = NCBlueprintCatalog({ + (b'\x11' * 32): MyBlueprint1, + (b'\x22' * 32): MyBlueprint2, + (b'\x33' * 32): MyBlueprint2, + (b'\x44' * 32): MyBlueprint2, + (b'\x55' * 32): MyBlueprint2, + }) + + async def test_success(self) -> None: + response = await self.web.get('builtin') + data = response.json_value() + assert data == dict( + success=True, + before=None, + after=None, + count=10, + has_more=False, + blueprints=[ + dict(id='11' * 32, name='MyBlueprint1'), + dict(id='22' * 32, name='MyBlueprint2'), + dict(id='33' * 32, name='MyBlueprint2'), + dict(id='44' * 32, name='MyBlueprint2'), + dict(id='55' * 32, name='MyBlueprint2'), + ], + ) + + async def test_pagination(self) -> None: + response = await self.web.get('builtin', { + b'count': b'2', + }) + data = response.json_value() + assert data == dict( + success=True, + before=None, + after=None, + count=2, + has_more=True, + blueprints=[ + dict(id='11' * 32, name='MyBlueprint1'), + dict(id='22' * 32, name='MyBlueprint2'), + ], + ) + + after = '22' * 32 + response = await self.web.get('builtin', { + b'after': after.encode(), + b'count': b'2', + }) + data = response.json_value() + assert data == dict( + success=True, + before=None, + after=after, + count=2, + has_more=True, + 
blueprints=[ + dict(id='33' * 32, name='MyBlueprint2'), + dict(id='44' * 32, name='MyBlueprint2'), + ], + ) + + after = '44' * 32 + response = await self.web.get('builtin', { + b'after': after.encode(), + b'count': b'2', + }) + data = response.json_value() + assert data == dict( + success=True, + before=None, + after=after, + count=2, + has_more=False, + blueprints=[ + dict(id='55' * 32, name='MyBlueprint2'), + ], + ) + + after = '55' * 32 + response = await self.web.get('builtin', { + b'after': after.encode(), + b'count': b'2', + }) + data = response.json_value() + assert data == dict( + success=True, + before=None, + after=after, + count=2, + has_more=False, + blueprints=[], + ) + + before = '55' * 32 + response = await self.web.get('builtin', { + b'before': before.encode(), + b'count': b'2', + }) + data = response.json_value() + assert data == dict( + success=True, + before=before, + after=None, + count=2, + has_more=True, + blueprints=[ + dict(id='44' * 32, name='MyBlueprint2'), + dict(id='33' * 32, name='MyBlueprint2'), + ], + ) + + before = '33' * 32 + response = await self.web.get('builtin', { + b'before': before.encode(), + b'count': b'2', + }) + data = response.json_value() + assert data == dict( + success=True, + before=before, + after=None, + count=2, + has_more=False, + blueprints=[ + dict(id='22' * 32, name='MyBlueprint2'), + dict(id='11' * 32, name='MyBlueprint1'), + ], + ) + + before = '11' * 32 + response = await self.web.get('builtin', { + b'before': before.encode(), + b'count': b'2', + }) + data = response.json_value() + assert data == dict( + success=True, + before=before, + after=None, + count=2, + has_more=False, + blueprints=[], + ) + + async def test_search_by_id(self) -> None: + bp_id = '33' * 32 + response = await self.web.get('builtin', { + b'search': bp_id.encode(), + }) + data = response.json_value() + assert data == dict( + success=True, + before=None, + after=None, + count=10, + has_more=False, + blueprints=[ + dict(id=bp_id, 
name='MyBlueprint2'), + ], + ) + + # tx exists but is not a blueprint + bp_id = self._settings.GENESIS_TX1_HASH.hex() + response = await self.web.get('builtin', { + b'search': bp_id.encode(), + }) + data = response.json_value() + assert data == dict( + success=True, + before=None, + after=None, + count=10, + has_more=False, + blueprints=[], + ) + + response = await self.web.get('builtin', { + b'search': b'ff' * 32, + }) + data = response.json_value() + assert data == dict( + success=True, + before=None, + after=None, + count=10, + has_more=False, + blueprints=[], + ) + + async def test_search_by_name(self) -> None: + response = await self.web.get('builtin', { + b'search': b'myblueprint1', + }) + data = response.json_value() + assert data == dict( + success=True, + before=None, + after=None, + count=10, + has_more=False, + blueprints=[ + dict(id='11' * 32, name='MyBlueprint1'), + ], + ) + + response = await self.web.get('builtin', { + b'search': b'MyBlueprint2', + }) + data = response.json_value() + assert data == dict( + success=True, + before=None, + after=None, + count=10, + has_more=False, + blueprints=[ + dict(id='22' * 32, name='MyBlueprint2'), + dict(id='33' * 32, name='MyBlueprint2'), + dict(id='44' * 32, name='MyBlueprint2'), + dict(id='55' * 32, name='MyBlueprint2'), + ], + ) + + response = await self.web.get('builtin', { + b'search': b'Unknown', + }) + data = response.json_value() + assert data == dict( + success=True, + before=None, + after=None, + count=10, + has_more=False, + blueprints=[], + ) diff --git a/tests/resources/nanocontracts/test_history.py b/tests/resources/nanocontracts/test_history.py new file mode 100644 index 000000000..b6acf1ede --- /dev/null +++ b/tests/resources/nanocontracts/test_history.py @@ -0,0 +1,252 @@ +from typing import Any + +from cryptography.hazmat.primitives.asymmetric import ec +from twisted.internet.defer import inlineCallbacks + +from hathor.conf import HathorSettings +from hathor.nanocontracts import Blueprint, 
Context, public +from hathor.nanocontracts.catalog import NCBlueprintCatalog +from hathor.nanocontracts.method import Method +from hathor.nanocontracts.resources import NanoContractHistoryResource +from hathor.nanocontracts.utils import sign_openssl +from hathor.simulator.utils import add_new_block +from hathor.transaction import Transaction +from hathor.transaction.headers import NanoHeader +from tests.resources.base_resource import StubSite, _BaseResourceTest +from tests.utils import add_blocks_unlock_reward, get_genesis_key + +settings = HathorSettings() + + +class MyBlueprint(Blueprint): + a: int + + @public + def initialize(self, ctx: Context, a: int) -> None: + self.a = a + + @public + def set_a(self, ctx: Context, a: int) -> None: + self.a = a + + +class NanoContractHistoryTest(_BaseResourceTest._ResourceTest): + def setUp(self): + super().setUp() + + self.manager = self.create_peer( + 'unittests', + unlock_wallet=True, + wallet_index=True, + nc_indexes=True, + ) + self.tx_storage = self.manager.tx_storage + + self.genesis = self.tx_storage.get_all_genesis() + self.genesis_blocks = [tx for tx in self.genesis if tx.is_block] + self.genesis_txs = [tx for tx in self.genesis if not tx.is_block] + + # read genesis keys + self.genesis_private_key = get_genesis_key() + self.genesis_public_key = self.genesis_private_key.public_key() + + add_blocks_unlock_reward(self.manager) + + self.web = StubSite(NanoContractHistoryResource(self.manager)) + + self.blueprint_id = b'1' * 32 + self.catalog = NCBlueprintCatalog({ + self.blueprint_id: MyBlueprint + }) + self.tx_storage.nc_catalog = self.catalog + self.nc_seqnum = 0 + + @inlineCallbacks + def test_fail_missing_id(self): + response1 = yield self.web.get('history') + self.assertEqual(400, response1.responseCode) + + @inlineCallbacks + def test_fail_invalid_id(self): + response1 = yield self.web.get( + 'history', + { + b'id': b'xxx', + } + ) + self.assertEqual(400, response1.responseCode) + + @inlineCallbacks + def 
test_fail_unknown_id(self): + response1 = yield self.web.get( + 'history', + { + b'id': b'0' * 32, + } + ) + self.assertEqual(404, response1.responseCode) + + @inlineCallbacks + def test_fail_not_contract_id(self): + response1 = yield self.web.get( + 'history', + { + b'id': self.genesis_txs[0].hash.hex().encode('ascii'), + } + ) + self.assertEqual(404, response1.responseCode) + + def _fill_nc(self, + nc: Transaction, + nc_id: bytes, + nc_method: str, + nc_args: list[Any], + private_key: ec.EllipticCurvePrivateKeyWithSerialization) -> None: + + method = getattr(MyBlueprint, nc_method) + method_parser = Method.from_callable(method) + nc_args_bytes = method_parser.serialize_args_bytes(nc_args) + + nano_header = NanoHeader( + tx=nc, + nc_seqnum=self.nc_seqnum, + nc_id=nc_id, + nc_method=nc_method, + nc_args_bytes=nc_args_bytes, + nc_address=b'', + nc_script=b'', + nc_actions=[], + ) + nc.headers.append(nano_header) + self.nc_seqnum += 1 + + sign_openssl(nano_header, private_key) + self.manager.cpu_mining_service.resolve(nc) + + def _create_contract(self, parents: list[bytes], timestamp: int) -> Transaction: + nc = Transaction( + weight=1, + inputs=[], + outputs=[], + parents=parents, + storage=self.tx_storage, + timestamp=timestamp + ) + self._fill_nc(nc, self.blueprint_id, 'initialize', [0], self.genesis_private_key) + self.assertTrue(self.manager.on_new_tx(nc)) + add_new_block(self.manager) + return nc + + @inlineCallbacks + def test_success(self): + parents = [tx.hash for tx in self.genesis_txs] + timestamp = 1 + max(tx.timestamp for tx in self.genesis) + nc1 = self._create_contract(parents, timestamp) + + timestamp += 1 + nc2 = self._create_contract(parents, timestamp) + self.assertNotEqual(nc1.hash, nc2.hash) + + response1 = yield self.web.get( + 'history', + { + b'id': bytes(nc1.hash.hex(), 'utf-8'), + } + ) + data1 = response1.json_value() + self.assertEqual(len(data1['history']), 1) + self.assertEqual(data1['has_more'], False) + 
self.assertEqual(data1['history'][0]['hash'], nc1.hash.hex()) + self.assertEqual(data1['history'][0]['nc_method'], 'initialize') + + # Now we create a transaction + tx1 = Transaction( + weight=1, + inputs=[], + outputs=[], + parents=parents, + storage=self.tx_storage, + timestamp=timestamp + ) + self._fill_nc(tx1, nc1.hash, 'set_a', [1], self.genesis_private_key) + self.assertTrue(self.manager.on_new_tx(tx1)) + add_new_block(self.manager) + + # Check both transactions belongs to nc1 history. + response2 = yield self.web.get( + 'history', + { + b'id': nc1.hash.hex().encode('ascii'), + } + ) + data2 = response2.json_value() + self.assertEqual(data2['has_more'], False) + self.assertEqual(len(data2['history']), 2) + ids = [tx['hash'] for tx in data2['history']] + self.assertEqual(ids, [tx1.hash.hex(), nc1.hash.hex()]) + + # Check paging works minimally with after + response2a = yield self.web.get( + 'history', + { + b'id': nc1.hash.hex().encode('ascii'), + b'count': b'1', + b'after': ids[0].encode('ascii'), + } + ) + data2a = response2a.json_value() + self.assertEqual(len(data2a['history']), 1) + self.assertEqual(data2a['has_more'], False) + self.assertEqual(data2a['count'], 1) + self.assertEqual(data2a['after'], ids[0]) + self.assertEqual(data2a['before'], None) + paginated_ids = [tx['hash'] for tx in data2a['history']] + self.assertEqual(paginated_ids, [ids[1]]) + + # Check paging works minimally with before + response2b = yield self.web.get( + 'history', + { + b'id': nc1.hash.hex().encode('ascii'), + b'count': b'1', + b'before': ids[1].encode('ascii'), + } + ) + data2b = response2b.json_value() + self.assertEqual(len(data2b['history']), 1) + self.assertEqual(data2b['has_more'], False) + self.assertEqual(data2b['count'], 1) + self.assertEqual(data2b['after'], None) + self.assertEqual(data2b['before'], ids[1]) + paginated_ids = [tx['hash'] for tx in data2b['history']] + self.assertEqual(paginated_ids, [ids[0]]) + + # Getting the first page only + response2c = yield 
self.web.get( + 'history', + { + b'id': nc1.hash.hex().encode('ascii'), + b'count': b'1', + } + ) + data2c = response2c.json_value() + self.assertEqual(len(data2c['history']), 1) + self.assertEqual(data2c['has_more'], True) + self.assertEqual(data2c['count'], 1) + self.assertEqual(data2c['after'], None) + self.assertEqual(data2c['before'], None) + paginated_ids = [tx['hash'] for tx in data2c['history']] + self.assertEqual(paginated_ids, [ids[0]]) + + # Make sure nc2 index still has only one tx. + response3 = yield self.web.get( + 'history', + { + b'id': nc2.hash.hex().encode('ascii'), + } + ) + data3 = response3.json_value() + self.assertEqual(data3['has_more'], False) + self.assertEqual(len(data3['history']), 1) + ids = set(tx['hash'] for tx in data3['history']) + self.assertEqual(ids, {nc2.hash.hex()}) diff --git a/tests/resources/nanocontracts/test_nc_creation.py b/tests/resources/nanocontracts/test_nc_creation.py new file mode 100644 index 000000000..52534a09c --- /dev/null +++ b/tests/resources/nanocontracts/test_nc_creation.py @@ -0,0 +1,570 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from typing import Any + +from hathor.nanocontracts.resources.nc_creation import NCCreationResource +from hathor.nanocontracts.types import BlueprintId, VertexId +from hathor.nanocontracts.utils import load_builtin_blueprint_for_ocb +from hathor.transaction import Transaction +from tests import unittest +from tests.dag_builder.builder import TestDAGBuilder +from tests.nanocontracts import test_blueprints +from tests.nanocontracts.test_blueprints.bet import Bet +from tests.resources.base_resource import StubSite, _BaseResourceTest +from tests.utils import get_genesis_key + + +class NCCreationResourceTest(_BaseResourceTest._ResourceTest): + + def setUp(self): + super().setUp() + self.manager = self.create_peer( + 'unittests', + nc_indexes=True, + ) + self.web = StubSite(NCCreationResource(self.manager)) + self.genesis_private_key = get_genesis_key() + self.builtin_bet_blueprint_id = BlueprintId(self.manager.rng.randbytes(32)) + self.manager.tx_storage.nc_catalog.blueprints[self.builtin_bet_blueprint_id] = Bet + + def prepare_ncs(self) -> tuple[Transaction, Transaction, Transaction, Transaction, Transaction]: + bet_code = load_builtin_blueprint_for_ocb('bet.py', 'Bet', test_blueprints) + private_key = unittest.OCB_TEST_PRIVKEY.hex() + password = unittest.OCB_TEST_PASSWORD.hex() + dag_builder = TestDAGBuilder.from_manager(self.manager) + artifacts = dag_builder.build_from_str(f''' + blockchain genesis b[1..11] + b10 < dummy + + ocb1.ocb_private_key = "{private_key}" + ocb1.ocb_password = "{password}" + + ocb2.ocb_private_key = "{private_key}" + ocb2.ocb_password = "{password}" + + nc1.nc_id = ocb2 + nc1.nc_method = initialize() + + nc2.nc_id = "{self.builtin_bet_blueprint_id.hex()}" + nc2.nc_method = initialize("00", "00", 0) + + nc3.nc_id = ocb2 + nc3.nc_method = initialize() + + nc4.nc_id = ocb1 + nc4.nc_method = initialize("00", "00", 0) + + nc5.nc_id = "{self.builtin_bet_blueprint_id.hex()}" + nc5.nc_method = initialize("00", "00", 0) + + ocb1 <-- ocb2 <-- b11 + 
b11 < nc1 < nc2 < nc3 < nc4 < nc5 + + ocb1.ocb_code = "{bet_code.encode().hex()}" + ocb2.ocb_code = ``` + from hathor.nanocontracts import Blueprint + from hathor.nanocontracts.context import Context + from hathor.nanocontracts.types import public + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + __blueprint__ = MyBlueprint + ``` + ''') + + artifacts.propagate_with(self.manager) + nc1, nc2, nc3, nc4, nc5 = artifacts.get_typed_vertices(['nc1', 'nc2', 'nc3', 'nc4', 'nc5'], Transaction) + assert nc1.is_nano_contract() + assert nc2.is_nano_contract() + assert nc3.is_nano_contract() + assert nc4.is_nano_contract() + assert nc5.is_nano_contract() + return nc1, nc2, nc3, nc4, nc5 + + def nc_to_response_item(self, nc: Transaction) -> dict[str, Any]: + assert nc.storage is not None + assert nc.is_nano_contract() + nano_header = nc.get_nano_header() + blueprint_id = BlueprintId(VertexId(nano_header.nc_id)) + blueprint_class = nc.storage.get_blueprint_class(blueprint_id) + return dict( + nano_contract_id=nc.hash_hex, + blueprint_id=blueprint_id.hex(), + blueprint_name=blueprint_class.__name__, + last_tx_timestamp=nc.timestamp, + total_txs=1, + created_at=nc.timestamp, + ) + + async def test_success(self) -> None: + nc1, nc2, nc3, nc4, nc5 = self.prepare_ncs() + response = await self.web.get('creation') + data = response.json_value() + + assert data == dict( + success=True, + before=None, + after=None, + count=10, + has_more=False, + nc_creation_txs=[ + self.nc_to_response_item(nc5), + self.nc_to_response_item(nc4), + self.nc_to_response_item(nc3), + self.nc_to_response_item(nc2), + self.nc_to_response_item(nc1), + ], + ) + + async def test_tx_aggregation(self) -> None: + private_key = unittest.OCB_TEST_PRIVKEY.hex() + password = unittest.OCB_TEST_PASSWORD.hex() + dag_builder = TestDAGBuilder.from_manager(self.manager) + artifacts = dag_builder.build_from_str(f''' + blockchain genesis b[1..12] + b10 < dummy + + 
ocb1.ocb_private_key = "{private_key}" + ocb1.ocb_password = "{password}" + ocb1.ocb_code = test_blueprint1.py, TestBlueprint1 + + ocb2.ocb_private_key = "{private_key}" + ocb2.ocb_password = "{password}" + ocb2.ocb_code = test_blueprint1.py, TestBlueprint1 + + nc1.nc_id = ocb1 + nc1.nc_method = initialize(0) + + nc2.nc_id = ocb2 + nc2.nc_method = initialize(0) + + nc3.nc_id = nc2 + nc3.nc_method = nop() + + nc4.nc_id = nc1 + nc4.nc_method = nop() + + nc5.nc_id = nc2 + nc5.nc_method = nop() + + nc6.nc_id = nc2 + nc6.nc_method = nop() + + nc7.nc_id = nc1 + nc7.nc_method = nop() + + ocb1 <-- ocb2 <-- b11 + b11 < nc1 < nc2 < nc3 < nc4 < nc5 < nc6 < nc7 + + nc1 <-- nc2 <-- nc3 <-- b12 + ''') + + artifacts.propagate_with(self.manager) + nc1, nc2, nc6, nc7 = artifacts.get_typed_vertices(['nc1', 'nc2', 'nc6', 'nc7'], Transaction) + assert nc1.is_nano_contract() + assert nc2.is_nano_contract() + assert nc6.is_nano_contract() + assert nc7.is_nano_contract() + response = await self.web.get('creation') + data = response.json_value() + + assert data == dict( + success=True, + before=None, + after=None, + count=10, + has_more=False, + nc_creation_txs=[ + dict( + nano_contract_id=nc2.hash_hex, + blueprint_id=nc2.get_nano_header().nc_id.hex(), + blueprint_name='TestBlueprint1', + last_tx_timestamp=nc6.timestamp, + total_txs=4, + created_at=nc2.timestamp, + ), + dict( + nano_contract_id=nc1.hash_hex, + blueprint_id=nc1.get_nano_header().nc_id.hex(), + blueprint_name='TestBlueprint1', + last_tx_timestamp=nc7.timestamp, + total_txs=3, + created_at=nc1.timestamp, + ) + ], + ) + + async def test_pagination(self) -> None: + nc1, nc2, nc3, nc4, nc5 = self.prepare_ncs() + response = await self.web.get('creation', { + b'count': b'2', + }) + data = response.json_value() + assert data == dict( + success=True, + before=None, + after=None, + count=2, + has_more=True, + nc_creation_txs=[ + self.nc_to_response_item(nc5), + self.nc_to_response_item(nc4), + ], + ) + + response = await 
self.web.get('creation', { + b'after': nc4.hash_hex.encode(), + b'count': b'2', + }) + data = response.json_value() + assert data == dict( + success=True, + before=None, + after=nc4.hash_hex, + count=2, + has_more=True, + nc_creation_txs=[ + self.nc_to_response_item(nc3), + self.nc_to_response_item(nc2), + ], + ) + + response = await self.web.get('creation', { + b'after': nc2.hash_hex.encode(), + b'count': b'2', + }) + data = response.json_value() + assert data == dict( + success=True, + before=None, + after=nc2.hash_hex, + count=2, + has_more=False, + nc_creation_txs=[ + self.nc_to_response_item(nc1), + ], + ) + + response = await self.web.get('creation', { + b'after': nc1.hash_hex.encode(), + b'count': b'2', + }) + data = response.json_value() + assert data == dict( + success=True, + before=None, + after=nc1.hash_hex, + count=2, + has_more=False, + nc_creation_txs=[], + ) + + response = await self.web.get('creation', { + b'before': nc1.hash_hex.encode(), + b'count': b'2', + }) + data = response.json_value() + assert data == dict( + success=True, + before=nc1.hash_hex, + after=None, + count=2, + has_more=True, + nc_creation_txs=[ + self.nc_to_response_item(nc2), + self.nc_to_response_item(nc3), + ], + ) + + response = await self.web.get('creation', { + b'before': nc3.hash_hex.encode(), + b'count': b'2', + }) + data = response.json_value() + assert data == dict( + success=True, + before=nc3.hash_hex, + after=None, + count=2, + has_more=False, + nc_creation_txs=[ + self.nc_to_response_item(nc4), + self.nc_to_response_item(nc5), + ], + ) + + async def test_pagination_asc(self) -> None: + nc1, nc2, nc3, nc4, nc5 = self.prepare_ncs() + response = await self.web.get('creation', { + b'count': b'2', + b'order': b'asc', + }) + data = response.json_value() + assert data == dict( + success=True, + before=None, + after=None, + count=2, + has_more=True, + nc_creation_txs=[ + self.nc_to_response_item(nc1), + self.nc_to_response_item(nc2), + ], + ) + + response = await 
self.web.get('creation', { + b'after': nc2.hash_hex.encode(), + b'count': b'2', + b'order': b'asc', + }) + data = response.json_value() + assert data == dict( + success=True, + before=None, + after=nc2.hash_hex, + count=2, + has_more=True, + nc_creation_txs=[ + self.nc_to_response_item(nc3), + self.nc_to_response_item(nc4), + ], + ) + + response = await self.web.get('creation', { + b'after': nc4.hash_hex.encode(), + b'count': b'2', + b'order': b'asc', + }) + data = response.json_value() + assert data == dict( + success=True, + before=None, + after=nc4.hash_hex, + count=2, + has_more=False, + nc_creation_txs=[ + self.nc_to_response_item(nc5), + ], + ) + + response = await self.web.get('creation', { + b'after': nc5.hash_hex.encode(), + b'count': b'2', + b'order': b'asc', + }) + data = response.json_value() + assert data == dict( + success=True, + before=None, + after=nc5.hash_hex, + count=2, + has_more=False, + nc_creation_txs=[], + ) + + response = await self.web.get('creation', { + b'before': nc5.hash_hex.encode(), + b'count': b'2', + b'order': b'asc', + }) + data = response.json_value() + assert data == dict( + success=True, + before=nc5.hash_hex, + after=None, + count=2, + has_more=True, + nc_creation_txs=[ + self.nc_to_response_item(nc4), + self.nc_to_response_item(nc3), + ], + ) + + response = await self.web.get('creation', { + b'before': nc3.hash_hex.encode(), + b'count': b'2', + b'order': b'asc', + }) + data = response.json_value() + assert data == dict( + success=True, + before=nc3.hash_hex, + after=None, + count=2, + has_more=False, + nc_creation_txs=[ + self.nc_to_response_item(nc2), + self.nc_to_response_item(nc1), + ], + ) + + async def test_search_by_nc_id(self) -> None: + nc1, nc2, nc3, nc4, nc5 = self.prepare_ncs() + response = await self.web.get('on_chain', { + b'search': nc3.hash_hex.encode(), + }) + data = response.json_value() + assert data == dict( + success=True, + before=None, + after=None, + count=10, + has_more=False, + nc_creation_txs=[ + 
self.nc_to_response_item(nc3), + ], + ) + + async def test_search_by_blueprint_id(self) -> None: + nc1, nc2, nc3, nc4, nc5 = self.prepare_ncs() + response = await self.web.get('on_chain', { + b'search': nc1.get_nano_header().nc_id.hex().encode(), + }) + data = response.json_value() + assert data == dict( + success=True, + before=None, + after=None, + count=10, + has_more=False, + nc_creation_txs=[ + self.nc_to_response_item(nc3), + self.nc_to_response_item(nc1), + ], + ) + + response = await self.web.get('on_chain', { + b'search': nc2.get_nano_header().nc_id.hex().encode(), + }) + data = response.json_value() + assert data == dict( + success=True, + before=None, + after=None, + count=10, + has_more=False, + nc_creation_txs=[ + self.nc_to_response_item(nc5), + self.nc_to_response_item(nc2), + ], + ) + + response = await self.web.get('on_chain', { + b'search': nc4.get_nano_header().nc_id.hex().encode(), + }) + data = response.json_value() + assert data == dict( + success=True, + before=None, + after=None, + count=10, + has_more=False, + nc_creation_txs=[ + self.nc_to_response_item(nc4), + ], + ) + + async def test_search_by_blueprint_id_with_pagination(self) -> None: + nc1, nc2, nc3, nc4, nc5 = self.prepare_ncs() + nc1_nano_header = nc1.get_nano_header() + response = await self.web.get('on_chain', { + b'search': nc1_nano_header.nc_id.hex().encode(), + b'count': b'1', + }) + data = response.json_value() + assert data == dict( + success=True, + before=None, + after=None, + count=1, + has_more=True, + nc_creation_txs=[ + self.nc_to_response_item(nc3), + ], + ) + + response = await self.web.get('on_chain', { + b'search': nc1_nano_header.nc_id.hex().encode(), + b'count': b'1', + b'after': nc3.hash_hex.encode() + }) + data = response.json_value() + assert data == dict( + success=True, + before=None, + after=nc3.hash_hex, + count=1, + has_more=False, + nc_creation_txs=[ + self.nc_to_response_item(nc1), + ], + ) + + async def test_search_non_existent(self) -> None: + 
self.prepare_ncs() + response = await self.web.get('on_chain', { + b'search': self._settings.GENESIS_BLOCK_HASH.hex().encode(), + }) + data = response.json_value() + assert data == dict( + success=True, + before=None, + after=None, + count=10, + has_more=False, + nc_creation_txs=[], + ) + + response = await self.web.get('on_chain', { + b'search': b'fe' * 32, + }) + data = response.json_value() + assert data == dict( + success=True, + before=None, + after=None, + count=10, + has_more=False, + nc_creation_txs=[], + ) + + async def test_search_non_hex(self) -> None: + self.prepare_ncs() + response = await self.web.get('builtin', { + b'search': b'abc', + }) + data = response.json_value() + assert data == dict( + success=True, + count=10, + before=None, + after=None, + has_more=False, + nc_creation_txs=[], + ) + + async def test_non_hex_pagination(self) -> None: + self.prepare_ncs() + response = await self.web.get('creation', { + b'after': b'abc', + b'count': b'2', + }) + data = response.json_value() + assert response.responseCode == 400 + assert data == dict( + success=False, + error='Invalid "before" or "after": abc' + ) diff --git a/tests/resources/nanocontracts/test_nc_exec_logs.py b/tests/resources/nanocontracts/test_nc_exec_logs.py new file mode 100644 index 000000000..e36b1f0cd --- /dev/null +++ b/tests/resources/nanocontracts/test_nc_exec_logs.py @@ -0,0 +1,214 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from unittest.mock import ANY + +from hathor.nanocontracts.resources.nc_exec_logs import NCExecLogsResource +from hathor.transaction import Block, Transaction +from tests.nanocontracts.test_nc_exec_logs import MY_BLUEPRINT1_ID, BaseNCExecLogs +from tests.resources.base_resource import StubSite + + +class NCExecLogsResourceTest(BaseNCExecLogs): + __test__ = True + + def setUp(self): + super().setUp() + self._prepare() + self.web = StubSite(NCExecLogsResource(self.manager)) + artifacts = self.dag_builder.build_from_str(f""" + blockchain genesis b[1..2] + blockchain b1 a[2..3] + b1 < dummy + b2 < a2 + + nc1.nc_id = "{MY_BLUEPRINT1_ID.hex()}" + nc1.nc_method = initialize() + + nc1 <-- b2 + nc1 <-- a2 + """) + + for _, vertex in artifacts.list: + assert self.manager.on_new_tx(vertex) + + self.nc1 = artifacts.get_typed_vertex('nc1', Transaction) + assert self.nc1.is_nano_contract() + self.b2, self.a2 = artifacts.get_typed_vertices(['b2', 'a2'], Block) + + async def test_missing_id(self) -> None: + response = await self.web.get('logs') + data = response.json_value() + assert response.responseCode == 400 + assert not data['success'] + + async def test_invalid_id(self) -> None: + response = await self.web.get('logs', { + b'id': b'a', + }) + data = response.json_value() + assert response.responseCode == 400 + assert data == dict( + success=False, + error='Invalid id: a' + ) + + async def test_tx_not_found(self) -> None: + response = await self.web.get('logs', { + b'id': b'aa', + }) + data = response.json_value() + assert response.responseCode == 404 + assert data == dict( + success=False, + error='NC "aa" not found.' + ) + + async def test_nc_not_found(self) -> None: + genesis_hash = self._settings.GENESIS_TX1_HASH.hex() + response = await self.web.get('logs', { + b'id': genesis_hash.encode() + }) + data = response.json_value() + assert response.responseCode == 404 + assert data == dict( + success=False, + error=f'NC "{genesis_hash}" not found.' 
+ ) + + async def test_invalid_log_level(self) -> None: + response = await self.web.get('logs', { + b'id': self.nc1.hash_hex.encode(), + b'log_level': b'UNKNOWN' + }) + data = response.json_value() + assert response.responseCode == 400 + assert data == dict( + success=False, + error='Invalid log level: UNKNOWN' + ) + + async def test_success(self) -> None: + response = await self.web.get('logs', { + b'id': self.nc1.hash_hex.encode(), + }) + data = response.json_value() + assert data == dict( + success=True, + nc_id=self.nc1.get_nano_header().get_contract_id().hex(), + nc_execution='success', + logs={ + self.a2.hash_hex: [ + dict( + error_traceback=None, + logs=[ + dict( + type='CALL_BEGIN', + level='DEBUG', + nc_id=self.nc1.hash_hex, + call_type='public', + method_name='initialize', + str_args='()', + timestamp=ANY, + actions=[], + ), + dict( + type='LOG', + level='INFO', + message='initialize() called on MyBlueprint1', + key_values={}, + timestamp=ANY, + ), + dict( + type='CALL_END', + level='DEBUG', + timestamp=ANY, + ) + ], + ), + ], + }, + ) + + async def test_all_execs(self) -> None: + response = await self.web.get('logs', { + b'id': self.nc1.hash_hex.encode(), + b'all_execs': b'true' + }) + data = response.json_value() + + expected_initialize_call_logs = [ + dict( + type='CALL_BEGIN', + level='DEBUG', + nc_id=self.nc1.hash_hex, + call_type='public', + method_name='initialize', + str_args='()', + timestamp=ANY, + actions=[], + ), + dict( + type='LOG', + level='INFO', + message='initialize() called on MyBlueprint1', + key_values={}, + timestamp=ANY, + ), + dict( + type='CALL_END', + level='DEBUG', + timestamp=ANY, + ) + ] + + assert data == dict( + success=True, + nc_id=self.nc1.get_nano_header().get_contract_id().hex(), + nc_execution='success', + logs={ + self.b2.hash_hex: [ + dict( + error_traceback=None, + logs=expected_initialize_call_logs, + ), + ], + self.a2.hash_hex: [ + dict( + error_traceback=None, + logs=expected_initialize_call_logs, + ), + ], + }, 
+ ) + + async def test_filter_log_level(self) -> None: + response = await self.web.get('logs', { + b'id': self.nc1.hash_hex.encode(), + b'log_level': b'ERROR' + }) + data = response.json_value() + assert data == dict( + success=True, + nc_id=self.nc1.get_nano_header().get_contract_id().hex(), + nc_execution='success', + logs={ + self.a2.hash_hex: [ + dict( + error_traceback=None, + logs=[], + ), + ], + }, + ) diff --git a/tests/resources/nanocontracts/test_on_chain.py b/tests/resources/nanocontracts/test_on_chain.py new file mode 100644 index 000000000..0a1f59265 --- /dev/null +++ b/tests/resources/nanocontracts/test_on_chain.py @@ -0,0 +1,427 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from typing import Any + +from hathor.nanocontracts import OnChainBlueprint +from hathor.nanocontracts.resources.on_chain import BlueprintOnChainResource +from hathor.nanocontracts.utils import load_builtin_blueprint_for_ocb +from tests import unittest +from tests.dag_builder.builder import TestDAGBuilder +from tests.nanocontracts import test_blueprints +from tests.resources.base_resource import StubSite, _BaseResourceTest + + +class BlueprintOnChainResourceTest(_BaseResourceTest._ResourceTest): + + def setUp(self): + super().setUp() + self.manager = self.create_peer( + 'unittests', + nc_indexes=True, + ) + self.web = StubSite(BlueprintOnChainResource(self.manager)) + self.dag_builder = TestDAGBuilder.from_manager(self.manager) + + def propagate_ocbs(self) -> list[OnChainBlueprint]: + bet_code = load_builtin_blueprint_for_ocb('bet.py', 'Bet', test_blueprints) + private_key = unittest.OCB_TEST_PRIVKEY.hex() + password = unittest.OCB_TEST_PASSWORD.hex() + + artifacts = self.dag_builder.build_from_str(f""" + blockchain genesis b[1..11] + b10 < dummy + + ocb1.ocb_private_key = "{private_key}" + ocb2.ocb_private_key = "{private_key}" + ocb3.ocb_private_key = "{private_key}" + ocb4.ocb_private_key = "{private_key}" + ocb5.ocb_private_key = "{private_key}" + + ocb1.ocb_password = "{password}" + ocb2.ocb_password = "{password}" + ocb3.ocb_password = "{password}" + ocb4.ocb_password = "{password}" + ocb5.ocb_password = "{password}" + + ocb1.ocb_code = "{bet_code.encode().hex()}" + ocb2.ocb_code = "{bet_code.encode().hex()}" + ocb3.ocb_code = "{bet_code.encode().hex()}" + ocb4.ocb_code = "{bet_code.encode().hex()}" + ocb5.ocb_code = "{bet_code.encode().hex()}" + + ocb1 <-- ocb2 <-- ocb3 <-- ocb4 <-- ocb5 <-- b11 + """) + + artifacts.propagate_with(self.manager) + return artifacts.get_typed_vertices(['ocb1', 'ocb2', 'ocb3', 'ocb4', 'ocb5'], OnChainBlueprint) + + def blueprint_tx_to_response(self, bp_tx: OnChainBlueprint, *, name: str = 'Bet') -> dict[str, Any]: + return 
dict( + id=bp_tx.blueprint_id().hex(), + name=name, + created_at=bp_tx.timestamp + ) + + async def test_success(self) -> None: + # test when there are no OCBs + response = await self.web.get('on_chain') + data = response.json_value() + + assert data == dict( + success=True, + before=None, + after=None, + count=10, + has_more=False, + blueprints=[], + ) + + ocbs = self.propagate_ocbs() + response = await self.web.get('on_chain') + data = response.json_value() + expected_bps = [self.blueprint_tx_to_response(ocb)for ocb in reversed(ocbs)] + + assert data == dict( + success=True, + before=None, + after=None, + count=10, + has_more=False, + blueprints=expected_bps, + ) + + async def test_ocb_not_confirmed(self) -> None: + private_key = unittest.OCB_TEST_PRIVKEY.hex() + password = unittest.OCB_TEST_PASSWORD.hex() + artifacts = self.dag_builder.build_from_str(f""" + blockchain genesis b[1..11] + b10 < dummy + + ocb1.ocb_private_key = "{private_key}" + ocb1.ocb_password = "{password}" + ocb1.ocb_code = test_blueprint1.py, TestBlueprint1 + + ocb2.ocb_private_key = "{private_key}" + ocb2.ocb_password = "{password}" + ocb2.ocb_code = test_blueprint1.py, TestBlueprint1 + + ocb1 <-- b11 + """) + + artifacts.propagate_with(self.manager) + ocb1 = artifacts.get_typed_vertex('ocb1', OnChainBlueprint) + + response = await self.web.get('on_chain') + data = response.json_value() + assert data == dict( + success=True, + before=None, + after=None, + count=10, + has_more=False, + blueprints=[ + self.blueprint_tx_to_response(ocb1, name='TestBlueprint1') + ], + ) + + async def test_pagination(self) -> None: + ocbs = self.propagate_ocbs() + response = await self.web.get('on_chain', { + b'count': b'2', + }) + data = response.json_value() + assert data == dict( + success=True, + before=None, + after=None, + count=2, + has_more=True, + blueprints=[ + self.blueprint_tx_to_response(ocbs[4]), + self.blueprint_tx_to_response(ocbs[3]), + ], + ) + + after = ocbs[3].hash_hex + response = await 
self.web.get('on_chain', { + b'after': after.encode(), + b'count': b'2', + }) + data = response.json_value() + assert data == dict( + success=True, + before=None, + after=after, + count=2, + has_more=True, + blueprints=[ + self.blueprint_tx_to_response(ocbs[2]), + self.blueprint_tx_to_response(ocbs[1]), + ], + ) + + after = ocbs[1].hash_hex + response = await self.web.get('on_chain', { + b'after': after.encode(), + b'count': b'2', + }) + data = response.json_value() + assert data == dict( + success=True, + before=None, + after=after, + count=2, + has_more=False, + blueprints=[ + self.blueprint_tx_to_response(ocbs[0]), + ], + ) + + after = ocbs[0].hash_hex + response = await self.web.get('on_chain', { + b'after': after.encode(), + b'count': b'2', + }) + data = response.json_value() + assert data == dict( + success=True, + before=None, + after=after, + count=2, + has_more=False, + blueprints=[], + ) + + before = ocbs[0].hash_hex + response = await self.web.get('on_chain', { + b'before': before.encode(), + b'count': b'2', + }) + data = response.json_value() + assert data == dict( + success=True, + before=before, + after=None, + count=2, + has_more=True, + blueprints=[ + self.blueprint_tx_to_response(ocbs[1]), + self.blueprint_tx_to_response(ocbs[2]), + ], + ) + + before = ocbs[2].hash_hex + response = await self.web.get('on_chain', { + b'before': before.encode(), + b'count': b'2', + }) + data = response.json_value() + assert data == dict( + success=True, + before=before, + after=None, + count=2, + has_more=False, + blueprints=[ + self.blueprint_tx_to_response(ocbs[3]), + self.blueprint_tx_to_response(ocbs[4]), + ], + ) + + before = ocbs[4].hash_hex + response = await self.web.get('on_chain', { + b'before': before.encode(), + b'count': b'2', + }) + data = response.json_value() + assert data == dict( + success=True, + before=before, + after=None, + count=2, + has_more=False, + blueprints=[], + ) + + async def test_pagination_asc(self) -> None: + ocbs = 
self.propagate_ocbs() + response = await self.web.get('on_chain', { + b'count': b'2', + b'order': b'asc', + }) + data = response.json_value() + assert data == dict( + success=True, + before=None, + after=None, + count=2, + has_more=True, + blueprints=[ + self.blueprint_tx_to_response(ocbs[0]), + self.blueprint_tx_to_response(ocbs[1]), + ], + ) + + after = ocbs[1].hash_hex + response = await self.web.get('on_chain', { + b'after': after.encode(), + b'count': b'2', + b'order': b'asc', + }) + data = response.json_value() + assert data == dict( + success=True, + before=None, + after=after, + count=2, + has_more=True, + blueprints=[ + self.blueprint_tx_to_response(ocbs[2]), + self.blueprint_tx_to_response(ocbs[3]), + ], + ) + + after = ocbs[3].hash_hex + response = await self.web.get('on_chain', { + b'after': after.encode(), + b'count': b'2', + b'order': b'asc', + }) + data = response.json_value() + assert data == dict( + success=True, + before=None, + after=after, + count=2, + has_more=False, + blueprints=[ + self.blueprint_tx_to_response(ocbs[4]), + ], + ) + + after = ocbs[4].hash_hex + response = await self.web.get('on_chain', { + b'after': after.encode(), + b'count': b'2', + b'order': b'asc', + }) + data = response.json_value() + assert data == dict( + success=True, + before=None, + after=after, + count=2, + has_more=False, + blueprints=[], + ) + + before = ocbs[4].hash_hex + response = await self.web.get('on_chain', { + b'before': before.encode(), + b'count': b'2', + b'order': b'asc', + }) + data = response.json_value() + assert data == dict( + success=True, + before=before, + after=None, + count=2, + has_more=True, + blueprints=[ + self.blueprint_tx_to_response(ocbs[3]), + self.blueprint_tx_to_response(ocbs[2]), + ], + ) + + before = ocbs[2].hash_hex + response = await self.web.get('on_chain', { + b'before': before.encode(), + b'count': b'2', + b'order': b'asc', + }) + data = response.json_value() + assert data == dict( + success=True, + before=before, + 
after=None, + count=2, + has_more=False, + blueprints=[ + self.blueprint_tx_to_response(ocbs[1]), + self.blueprint_tx_to_response(ocbs[0]), + ], + ) + + async def test_search_by_bp_id(self) -> None: + ocbs = self.propagate_ocbs() + some_bp_tx = ocbs[2] + response = await self.web.get('on_chain', { + b'search': some_bp_tx.hash_hex.encode(), + }) + data = response.json_value() + assert data == dict( + success=True, + before=None, + after=None, + count=10, + has_more=False, + blueprints=[ + self.blueprint_tx_to_response(some_bp_tx), + ], + ) + + # tx exists but is not a blueprint + bp_id = self._settings.GENESIS_TX1_HASH.hex() + response = await self.web.get('builtin', { + b'search': bp_id.encode(), + }) + data = response.json_value() + assert data == dict( + success=True, + before=None, + after=None, + count=10, + has_more=False, + blueprints=[], + ) + + response = await self.web.get('on_chain', { + b'search': b'ff' * 32, + }) + data = response.json_value() + assert data == dict( + success=True, + before=None, + after=None, + count=10, + has_more=False, + blueprints=[], + ) + + async def test_search_by_name(self) -> None: + response = await self.web.get('builtin', { + b'search': b'Bet', + }) + data = response.json_value() + # it's not implemented so it returns empty + assert data == dict( + success=True, + before=None, + after=None, + count=10, + has_more=False, + blueprints=[], + ) diff --git a/tests/resources/nanocontracts/test_state.py b/tests/resources/nanocontracts/test_state.py new file mode 100644 index 000000000..de5f86312 --- /dev/null +++ b/tests/resources/nanocontracts/test_state.py @@ -0,0 +1,512 @@ +import hashlib +import math +from typing import Any, NamedTuple, Optional, TypeAlias + +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric import ec +from twisted.internet.defer import inlineCallbacks + +from hathor.conf import HathorSettings +from hathor.crypto.util import decode_address, 
get_address_b58_from_bytes, get_public_key_bytes_compressed +from hathor.nanocontracts import Blueprint, Context, public, view +from hathor.nanocontracts.catalog import NCBlueprintCatalog +from hathor.nanocontracts.method import Method +from hathor.nanocontracts.resources import NanoContractStateResource +from hathor.nanocontracts.types import Address, NCActionType, NCDepositAction, Timestamp, TokenUid +from hathor.nanocontracts.utils import sign_openssl +from hathor.simulator.utils import add_new_block +from hathor.transaction import Transaction, TxInput +from hathor.transaction.headers import NanoHeader +from hathor.transaction.headers.nano_header import NanoHeaderAction +from hathor.transaction.scripts import P2PKH +from tests.resources.base_resource import StubSite, _BaseResourceTest +from tests.utils import add_blocks_unlock_reward, get_genesis_key + +settings = HathorSettings() + +Amount: TypeAlias = int + + +class MyNamedTuple(NamedTuple): + amount1: int + amount2: int + address: Optional[Address] + + +class MyBlueprint(Blueprint): + token_uid: TokenUid + total: Amount + date_last_bet: Timestamp + address_details: dict[Address, dict[str, Amount]] + bytes_field: bytes + dict_with_bytes: dict[bytes, str] + + @public + def initialize(self, ctx: Context, token_uid: TokenUid, date_last_bet: Timestamp) -> None: + self.token_uid = token_uid + self.date_last_bet = date_last_bet + self.total = 0 + + @public(allow_deposit=True) + def bet(self, ctx: Context, address: Address, score: str) -> None: + action = ctx.get_single_action(self.token_uid) + assert isinstance(action, NCDepositAction) + self.total += action.amount + partial = self.address_details.get(address, {}) + if score not in partial: + partial[score] = action.amount + else: + partial[score] += action.amount + self.address_details[address] = partial + + encoded_score = score.encode() + self.bytes_field = encoded_score + self.dict_with_bytes[encoded_score] = score + + @view + def has_result(self) -> bool: + 
return False + + @view + def add(self, a: int, b: int) -> int: + return a + b + + @view + def conditional_add(self, test_tuple: MyNamedTuple) -> Optional[int]: + """A method only for testing that sums amount1 + amount2, in case + the address is equal to WewDeXWyvHP7jJTs7tjLoQfoB72LLxJQqN + """ + conditional_address = 'WewDeXWyvHP7jJTs7tjLoQfoB72LLxJQqN' + if test_tuple.address and get_address_b58_from_bytes(test_tuple.address) == conditional_address: + return test_tuple.amount1 + test_tuple.amount2 + + return None + + @view + def multiply(self, elements: list[int]) -> int: + return math.prod(elements) + + @view + def conditional_multiply_bytes(self, t: tuple[int, Optional[bytes]]) -> Optional[bytes]: + multiplier = t[0] + data = t[1] + if not data: + return None + + return multiplier * data + + +class BaseNanoContractStateTest(_BaseResourceTest._ResourceTest): + def setUp(self): + super().setUp() + + self.manager = self.create_peer('unittests', unlock_wallet=True, wallet_index=True) + self.tx_storage = self.manager.tx_storage + + self.genesis = self.tx_storage.get_all_genesis() + self.genesis_blocks = [tx for tx in self.genesis if tx.is_block] + self.genesis_txs = [tx for tx in self.genesis if not tx.is_block] + + # read genesis keys + self.genesis_private_key = get_genesis_key() + self.genesis_public_key = self.genesis_private_key.public_key() + + *_, self.last_block = add_blocks_unlock_reward(self.manager) + + self.web = StubSite(NanoContractStateResource(self.manager)) + + self.bet_id = bytes.fromhex('3cb032600bdf7db784800e4ea911b10676fa2f67591f82bb62628c234e771595') + self.catalog = NCBlueprintCatalog({ + self.bet_id: MyBlueprint + }) + + self.tx_storage.nc_catalog = self.catalog + self.nc_seqnum = 0 + + @inlineCallbacks + def test_fail_missing_id(self): + response1 = yield self.web.get('state') + self.assertEqual(400, response1.responseCode) + + @inlineCallbacks + def test_fail_invalid_id(self): + response1 = yield self.web.get('state', { + b'id': b'xxx', + }) 
+ self.assertEqual(400, response1.responseCode) + + @inlineCallbacks + def test_fail_unknown_id(self): + response1 = yield self.web.get('history', { + b'id': b'0' * 32, + }) + self.assertEqual(404, response1.responseCode) + + @inlineCallbacks + def test_fail_not_contract_id(self): + response1 = yield self.web.get('history', { + b'id': self.genesis_txs[0].hash.hex().encode('ascii'), + }) + self.assertEqual(404, response1.responseCode) + + def _fill_nc( + self, + nc: Transaction, + nc_id: bytes, + nc_method: str, + nc_args: list[Any], + private_key: ec.EllipticCurvePrivateKeyWithSerialization, + *, + nc_actions: list[NanoHeaderAction] | None = None + ) -> None: + + method_parser = Method.from_callable(getattr(MyBlueprint, nc_method)) + nc_args_bytes = method_parser.serialize_args_bytes(nc_args) + + nano_header = NanoHeader( + tx=nc, + nc_seqnum=self.nc_seqnum, + nc_id=nc_id, + nc_method=nc_method, + nc_args_bytes=nc_args_bytes, + nc_address=b'', + nc_script=b'', + nc_actions=nc_actions or [], + ) + nc.headers.append(nano_header) + self.nc_seqnum += 1 + + sign_openssl(nano_header, private_key) + self.manager.cpu_mining_service.resolve(nc) + + @inlineCallbacks + def test_success(self): + parents = [tx.hash for tx in self.genesis_txs] + timestamp = 1 + max(tx.timestamp for tx in self.genesis) + + date_last_bet = 1699579721 + # Create bet nano contract + nc = Transaction( + weight=1, + inputs=[], + outputs=[], + parents=parents, + storage=self.tx_storage, + timestamp=timestamp + ) + self._fill_nc( + nc, + self.bet_id, + 'initialize', + [settings.HATHOR_TOKEN_UID, date_last_bet], + self.genesis_private_key, + ) + self.assertTrue(self.manager.on_new_tx(nc)) + + # Before the execution we can't get the state + response0 = yield self.web.get( + 'state', [ + (b'id', nc.hash.hex().encode('ascii')), + (b'fields[]', b'token_uid'), + ] + ) + self.assertEqual(404, response0.responseCode) + # Execute the nano contract + block1 = add_new_block(self.manager) + + response1 = yield 
self.web.get( + 'state', [ + (b'id', nc.hash.hex().encode('ascii')), + (b'fields[]', b'token_uid'), + (b'fields[]', b'total'), + (b'fields[]', b'date_last_bet'), + (b'balances[]', settings.HATHOR_TOKEN_UID.hex().encode('ascii')), + (b'calls[]', b'has_result()'), + (b'calls[]', b'unknown_method()'), + (b'calls[]', b'add(5, 12)'), + (b'calls[]', b'conditional_add([2, 4, null])'), + (b'calls[]', b'conditional_add([2, 4, "WewDeXWyvHP7jJTs7tjLoQfoB72LLxJQqN"])'), + (b'calls[]', b'multiply([2, 5, 8, 10])'), + (b'calls[]', b'conditional_multiply_bytes([5, "01"])'), + (b'calls[]', b'conditional_multiply_bytes([3, null])'), + ] + ) + data1 = response1.json_value() + fields1 = data1['fields'] + self.assertEqual(data1['blueprint_id'], self.bet_id.hex()) + self.assertEqual(data1['blueprint_name'], 'MyBlueprint') + self.assertEqual(fields1['token_uid'], {'value': settings.HATHOR_TOKEN_UID.hex()}) + self.assertEqual(fields1['total'], {'value': 0}) + self.assertEqual(fields1['date_last_bet'], {'value': date_last_bet}) + balances1 = data1['balances'] + self.assertEqual( + balances1, + {settings.HATHOR_TOKEN_UID.hex(): {'value': '0', 'can_mint': False, 'can_melt': False}} + ) + calls1 = data1['calls'] + self.assertEqual(calls1, { + 'has_result()': {'value': False}, + 'unknown_method()': {'errmsg': "NCMethodNotFound('MyBlueprint.unknown_method')"}, + 'add(5, 12)': {'value': 17}, + 'conditional_add([2, 4, null])': {'value': None}, + 'conditional_add([2, 4, "WewDeXWyvHP7jJTs7tjLoQfoB72LLxJQqN"])': {'value': 6}, + 'multiply([2, 5, 8, 10])': {'value': 800}, + 'conditional_multiply_bytes([5, "01"])': {'value': '0101010101'}, + 'conditional_multiply_bytes([3, null])': {'value': None} + }) + + # Now we create a deposit in the nano contract with the genesis output + inputs = [TxInput(self.genesis_blocks[0].hash, 0, b'')] + address_b58 = self.genesis_blocks[0].outputs[0].to_human_readable()['address'] + nc_bet = Transaction( + weight=1, + inputs=inputs, + outputs=[], + parents=parents, + 
storage=self.tx_storage, + timestamp=timestamp + ) + bet_result = '1x0' + self._fill_nc( + nc_bet, + nc.hash, + 'bet', + [decode_address(address_b58), bet_result], + self.genesis_private_key, + nc_actions=[ + NanoHeaderAction( + type=NCActionType.DEPOSIT, + token_index=0, + amount=self.genesis_blocks[0].outputs[0].value, + ) + ] + ) + + data_to_sign = nc_bet.get_sighash_all() + public_key_bytes = get_public_key_bytes_compressed(self.genesis_public_key) + hashed_data = hashlib.sha256(data_to_sign).digest() + signature = self.genesis_private_key.sign(hashed_data, ec.ECDSA(hashes.SHA256())) + nc_bet.inputs[0].data = P2PKH.create_input_data(public_key_bytes, signature) + + self.manager.cpu_mining_service.resolve(nc_bet) + # Add to DAG. + self.assertTrue(self.manager.on_new_tx(nc_bet)) + # Execute the deposit + block2 = add_new_block(self.manager) + + address_param = "address_details.a'{}'".format(address_b58) + dict_with_bytes_param = "dict_with_bytes.b'{}'".format(bet_result.encode().hex()) + response2 = yield self.web.get( + 'state', [ + (b'id', nc.hash.hex().encode('ascii')), + (b'fields[]', b'token_uid'), + (b'fields[]', b'total'), + (b'fields[]', b'date_last_bet'), + (b'fields[]', address_param.encode()), + (b'fields[]', b'bytes_field'), + (b'fields[]', dict_with_bytes_param.encode()), + (b'balances[]', settings.HATHOR_TOKEN_UID.hex().encode('ascii')), + ] + ) + data2 = response2.json_value() + fields2 = data2['fields'] + self.assertEqual(data2['blueprint_id'], self.bet_id.hex()) + self.assertEqual(data2['blueprint_name'], 'MyBlueprint') + self.assertEqual(fields2['token_uid'], {'value': settings.HATHOR_TOKEN_UID.hex()}) + self.assertEqual(fields2['total'], {'value': 10**11}) + self.assertEqual(fields2['date_last_bet'], {'value': date_last_bet}) + self.assertEqual(len(fields2[address_param]), 1) + # TODO: RE-IMPLEMENT SUPPORT FOR THIS + # FIXME + self.assertEqual(fields2[address_param], {'errmsg': 'not a blueprint field'}) + # 
self.assertEqual(fields2[address_param], {'value': {'1x0': 10**11}}) + self.assertEqual(fields2['bytes_field'], {'value': bet_result.encode().hex()}) + # FIXME + self.assertEqual(fields2[dict_with_bytes_param], {'errmsg': 'not a blueprint field'}) + # self.assertEqual(fields2[dict_with_bytes_param], {'value': '1x0'}) + balances2 = data2['balances'] + self.assertEqual( + balances2, + {settings.HATHOR_TOKEN_UID.hex(): {'value': '100000000000', 'can_mint': False, 'can_melt': False}} + ) + + # Test __all__ balance + response3 = yield self.web.get( + 'state', + { + b'id': nc.hash.hex().encode('ascii'), + b'balances[]': '__all__'.encode('ascii'), + } + ) + data3 = response3.json_value() + self.assertEqual(data3['blueprint_id'], self.bet_id.hex()) + self.assertEqual(data3['blueprint_name'], 'MyBlueprint') + balances3 = data3['balances'] + self.assertEqual( + balances3, + {settings.HATHOR_TOKEN_UID.hex(): {'value': '100000000000', 'can_mint': False, 'can_melt': False}} + ) + + # Test getting the state in a previous block + # With block hash + response4 = yield self.web.get( + 'state', [ + (b'id', nc.hash.hex().encode('ascii')), + (b'fields[]', b'token_uid'), + (b'fields[]', b'total'), + (b'fields[]', b'date_last_bet'), + (b'fields[]', address_param.encode()), + (b'balances[]', settings.HATHOR_TOKEN_UID.hex().encode('ascii')), + (b'block_hash', block1.hash.hex().encode('ascii')), + ] + ) + data4 = response4.json_value() + fields4 = data4['fields'] + self.assertEqual(data4['blueprint_id'], self.bet_id.hex()) + self.assertEqual(data4['blueprint_name'], 'MyBlueprint') + self.assertEqual(fields4['token_uid'], {'value': settings.HATHOR_TOKEN_UID.hex()}) + self.assertEqual(fields4['total'], {'value': 0}) + self.assertEqual(fields4['date_last_bet'], {'value': date_last_bet}) + self.assertEqual(fields4[address_param].get('value'), None) + balances4 = data4['balances'] + self.assertEqual( + balances4, + {settings.HATHOR_TOKEN_UID.hex(): {'value': '0', 'can_mint': False, 'can_melt': 
False}} + ) + + # With block height + response5 = yield self.web.get( + 'state', [ + (b'id', nc.hash.hex().encode('ascii')), + (b'fields[]', b'token_uid'), + (b'fields[]', b'total'), + (b'fields[]', b'date_last_bet'), + (b'fields[]', address_param.encode()), + (b'balances[]', settings.HATHOR_TOKEN_UID.hex().encode('ascii')), + (b'block_height', str(block1.static_metadata.height).encode('ascii')), + ] + ) + data5 = response5.json_value() + fields5 = data5['fields'] + self.assertEqual(data5['blueprint_id'], self.bet_id.hex()) + self.assertEqual(data5['blueprint_name'], 'MyBlueprint') + self.assertEqual(fields5['token_uid'], {'value': settings.HATHOR_TOKEN_UID.hex()}) + self.assertEqual(fields5['total'], {'value': 0}) + self.assertEqual(fields5['date_last_bet'], {'value': date_last_bet}) + self.assertEqual(fields5[address_param].get('value'), None) + balances5 = data5['balances'] + self.assertEqual( + balances5, + {settings.HATHOR_TOKEN_UID.hex(): {'value': '0', 'can_mint': False, 'can_melt': False}} + ) + + # With block2.timestamp, should get block2 state + response6 = yield self.web.get( + 'state', [ + (b'id', nc.hash.hex().encode('ascii')), + (b'fields[]', b'token_uid'), + (b'fields[]', b'total'), + (b'fields[]', b'date_last_bet'), + (b'fields[]', address_param.encode()), + (b'balances[]', settings.HATHOR_TOKEN_UID.hex().encode('ascii')), + (b'timestamp', str(block2.timestamp).encode('ascii')), + ] + ) + data6 = response6.json_value() + fields6 = data6['fields'] + self.assertEqual(data6['blueprint_id'], self.bet_id.hex()) + self.assertEqual(data6['blueprint_name'], 'MyBlueprint') + self.assertEqual(fields6['token_uid'], {'value': settings.HATHOR_TOKEN_UID.hex()}) + self.assertEqual(fields6['total'], {'value': 10**11}) + self.assertEqual(fields6['date_last_bet'], {'value': date_last_bet}) + self.assertEqual(fields6[address_param].get('value'), None) + balances6 = data6['balances'] + self.assertEqual( + balances6, + {settings.HATHOR_TOKEN_UID.hex(): {'value': 
'100000000000', 'can_mint': False, 'can_melt': False}} + ) + + # With block2.timestamp - 1, should get block1 state + response7 = yield self.web.get( + 'state', [ + (b'id', nc.hash.hex().encode('ascii')), + (b'fields[]', b'token_uid'), + (b'fields[]', b'total'), + (b'fields[]', b'date_last_bet'), + (b'fields[]', address_param.encode()), + (b'balances[]', settings.HATHOR_TOKEN_UID.hex().encode('ascii')), + (b'timestamp', str(block2.timestamp - 1).encode('ascii')), + ] + ) + data7 = response7.json_value() + fields7 = data7['fields'] + self.assertEqual(data7['blueprint_id'], self.bet_id.hex()) + self.assertEqual(data7['blueprint_name'], 'MyBlueprint') + self.assertEqual(fields7['token_uid'], {'value': settings.HATHOR_TOKEN_UID.hex()}) + self.assertEqual(fields7['total'], {'value': 0}) + self.assertEqual(fields7['date_last_bet'], {'value': date_last_bet}) + self.assertEqual(fields7[address_param].get('value'), None) + balances7 = data7['balances'] + self.assertEqual( + balances7, + {settings.HATHOR_TOKEN_UID.hex(): {'value': '0', 'can_mint': False, 'can_melt': False}} + ) + + # With block1.timestamp - 1, the contract doesn't exist + response7 = yield self.web.get( + 'state', [ + (b'id', nc.hash.hex().encode('ascii')), + (b'fields[]', b'token_uid'), + (b'fields[]', b'total'), + (b'fields[]', b'date_last_bet'), + (b'fields[]', address_param.encode()), + (b'balances[]', settings.HATHOR_TOKEN_UID.hex().encode('ascii')), + (b'timestamp', str(block1.timestamp - 1).encode('ascii')), + ] + ) + self.assertEqual(response7.responseCode, 404) + data7 = response7.json_value() + self.assertEqual(data7['error'], f'Nano contract does not exist at block {self.last_block.hash_hex}.') + + # Validate errors using block_hash / block_height + + # Both parameters can't be used together + response8 = yield self.web.get( + 'state', [ + (b'id', nc.hash.hex().encode('ascii')), + (b'fields[]', b'token_uid'), + (b'block_height', str(block1.static_metadata.height).encode('ascii')), + 
(b'block_hash', block1.hash.hex().encode('ascii')), + ] + ) + self.assertEqual(400, response8.responseCode) + + # block_height does not exist + response9 = yield self.web.get( + 'state', [ + (b'id', nc.hash.hex().encode('ascii')), + (b'fields[]', b'token_uid'), + (b'block_height', str(block1.static_metadata.height + 5).encode('ascii')), + ] + ) + self.assertEqual(400, response9.responseCode) + + # invalid block_hash does not exist + response10 = yield self.web.get( + 'state', [ + (b'id', nc.hash.hex().encode('ascii')), + (b'fields[]', b'token_uid'), + (b'block_hash', '123'.encode('ascii')), + ] + ) + self.assertEqual(400, response10.responseCode) + + # block_hash is a tx + response11 = yield self.web.get( + 'state', [ + (b'id', nc.hash.hex().encode('ascii')), + (b'fields[]', b'token_uid'), + (b'block_hash', nc_bet.hash.hex().encode('ascii')), + ] + ) + self.assertEqual(400, response11.responseCode) diff --git a/tests/resources/p2p/test_status.py b/tests/resources/p2p/test_status.py index 44da55eab..0d21c0665 100644 --- a/tests/resources/p2p/test_status.py +++ b/tests/resources/p2p/test_status.py @@ -3,7 +3,6 @@ from twisted.internet.defer import inlineCallbacks import hathor -from hathor.conf.unittests import SETTINGS from hathor.p2p.peer_endpoint import PeerAddress from hathor.p2p.resources import StatusResource from hathor.simulator import FakeConnection @@ -44,14 +43,14 @@ def test_get(self): self.assertIn('height', dag_data['best_block_tips'][0]) self.assertIsInstance(dag_data['best_block_tips'][0]['hash'], str) self.assertIsInstance(dag_data['best_block_tips'][0]['height'], int) - self.assertEqual(dag_data['best_block_tips'][0]['hash'], SETTINGS.GENESIS_BLOCK_HASH.hex()) + self.assertEqual(dag_data['best_block_tips'][0]['hash'], self._settings.GENESIS_BLOCK_HASH.hex()) self.assertEqual(dag_data['best_block_tips'][0]['height'], 0) self.assertIsNotNone(dag_data['best_block']) self.assertIn('hash', dag_data['best_block']) self.assertIn('height', 
dag_data['best_block']) self.assertIsInstance(dag_data['best_block']['hash'], str) self.assertIsInstance(dag_data['best_block']['height'], int) - self.assertEqual(dag_data['best_block']['hash'], SETTINGS.GENESIS_BLOCK_HASH.hex()) + self.assertEqual(dag_data['best_block']['hash'], self._settings.GENESIS_BLOCK_HASH.hex()) self.assertEqual(dag_data['best_block']['height'], 0) @inlineCallbacks diff --git a/tests/resources/transaction/test_mining.py b/tests/resources/transaction/test_mining.py index 94b0d45e7..0550751eb 100644 --- a/tests/resources/transaction/test_mining.py +++ b/tests/resources/transaction/test_mining.py @@ -40,6 +40,9 @@ def test_get_block_template_with_address(self): 'feature_activation_bit_counts': [0, 0, 0, 0], 'accumulated_weight_raw': '2', 'score_raw': '0', + 'nc_block_root_id': None, + 'nc_execution': None, + 'nc_calls': None, }, 'tokens': [], 'data': '', @@ -75,6 +78,9 @@ def test_get_block_template_without_address(self): 'feature_activation_bit_counts': [0, 0, 0, 0], 'accumulated_weight_raw': '2', 'score_raw': '0', + 'nc_block_root_id': None, + 'nc_execution': None, + 'nc_calls': None, }, 'tokens': [], 'data': '', diff --git a/tests/resources/transaction/test_pushtx.py b/tests/resources/transaction/test_pushtx.py index 174ee3586..3f488e429 100644 --- a/tests/resources/transaction/test_pushtx.py +++ b/tests/resources/transaction/test_pushtx.py @@ -18,9 +18,6 @@ class BasePushTxTest(_BaseResourceTest._ResourceTest): is_post: Optional[bool] = None - # XXX: we will get a "two instances of the same tx in memory" otherwise - use_memory_storage = True - def setUp(self): super().setUp() self.web = StubSite(PushTxResource(self.manager)) @@ -233,6 +230,9 @@ def test_spending_voided(self) -> Generator: data = response.json_value() self.assertTrue(data['success']) + # We have to get tx2 from the storage because the saved instance is different from the one we created here. 
+ tx2 = self.manager.tx_storage.get_transaction(tx2.hash) + # Now we set this tx2 as voided and try to push a tx3 that spends tx2 tx_meta = tx2.get_metadata() tx_meta.voided_by = {tx2.hash} diff --git a/tests/resources/transaction/test_tx.py b/tests/resources/transaction/test_tx.py index 9419ae494..3a005dbfc 100644 --- a/tests/resources/transaction/test_tx.py +++ b/tests/resources/transaction/test_tx.py @@ -11,9 +11,6 @@ class TransactionTest(_BaseResourceTest._ResourceTest): - # XXX: using memory storage so that we can more easily manipulate the tokens-index for a test - use_memory_storage = True - def setUp(self): super().setUp() self.web = StubSite(TransactionResource(self.manager)) @@ -130,17 +127,11 @@ def test_get_one_known_tx(self): tx_input.set_static_metadata(TransactionStaticMetadata(min_height=0, closest_ancestor_block=b'')) self.manager.tx_storage.save_transaction(tx_input) - # XXX: this is completely dependant on MemoryTokensIndex implementation, hence use_memory_storage=True token_bytes1 = bytes.fromhex('001c382847d8440d05da95420bee2ebeb32bc437f82a9ae47b0745c8a29a7b0d') - status = self.manager.tx_storage.indexes.tokens._tokens[token_bytes1] - status.name = 'Test Coin' - status.symbol = 'TSC' + self.manager.tx_storage.indexes.tokens.create_token_info(token_bytes1, 'Test Coin', 'TSC') - # XXX: this is completely dependant on MemoryTokensIndex implementation, hence use_memory_storage=True token_bytes2 = bytes.fromhex('007231eee3cb6160d95172a409d634d0866eafc8775f5729fff6a61e7850aba5') - status2 = self.manager.tx_storage.indexes.tokens._tokens[token_bytes2] - status2.name = 'NewCoin' - status2.symbol = 'NCN' + self.manager.tx_storage.indexes.tokens.create_token_info(token_bytes2, 'NewCoin', 'NCN') response = yield self.web.get( "transaction", {b'id': b'0033784bc8443ba851fd88d81c6f06774ae529f25c1fa8f026884ad0a0e98011'}) @@ -231,11 +222,8 @@ def test_get_one_known_tx_with_authority(self): # Both inputs are the same as the last parent, so no need to manually 
add them - # XXX: this is completely dependant on MemoryTokensIndex implementation token_bytes1 = bytes.fromhex('000023b318c91dcfd4b967b205dc938f9f5e2fd5114256caacfb8f6dd13db330') - status = self.manager.tx_storage.indexes.tokens._tokens[token_bytes1] - status.name = 'Wat wat' - status.symbol = 'WAT' + self.manager.tx_storage.indexes.tokens.create_token_info(token_bytes1, 'Wat wat', 'WAT') response = yield self.web.get( "transaction", {b'id': b'00005f234469407614bf0abedec8f722bb5e534949ad37650f6077c899741ed7'}) @@ -290,12 +278,10 @@ def test_get_many(self): # Get last 2 blocks expected1 = blocks[-2:] - expected1.reverse() - response1 = yield self.web.get("transaction", {b'count': b'2', b'type': b'block'}) data1 = response1.json_value() - for expected, result in zip(expected1, data1['transactions']): + for expected, result in zip(reversed(expected1), data1['transactions'], strict=True): self.assertEqual(expected.timestamp, result['timestamp']) self.assertEqual(expected.hash.hex(), result['tx_id']) @@ -303,21 +289,18 @@ def test_get_many(self): # Get last 8 txs expected2 = txs[-8:] - expected2.reverse() - response2 = yield self.web.get("transaction", {b'count': b'8', b'type': b'tx'}) data2 = response2.json_value() - for expected, result in zip(expected2, data2['transactions']): + for expected, result in zip(reversed(expected2), data2['transactions'], strict=True): self.assertEqual(expected.timestamp, result['timestamp']) self.assertEqual(expected.hash.hex(), result['tx_id']) self.assertTrue(data2['has_more']) # Get older blocks with hash reference - expected3 = blocks[:2] - expected3.reverse() - + genesis_block = self.manager.tx_storage.get_genesis(self._settings.GENESIS_BLOCK_HASH) + expected3 = [genesis_block, *blocks[:2]] response3 = yield self.web.get( "transaction", { b'count': b'3', @@ -328,7 +311,7 @@ def test_get_many(self): }) data3 = response3.json_value() - for expected, result in zip(expected3, data3['transactions']): + for expected, result in 
zip(reversed(expected3), data3['transactions'], strict=True): self.assertEqual(expected.timestamp, result['timestamp']) self.assertEqual(expected.hash.hex(), result['tx_id']) @@ -345,7 +328,7 @@ def test_get_many(self): }) data4 = response4.json_value() - for expected, result in zip(expected2, data4['transactions']): + for expected, result in zip(expected2, data4['transactions'], strict=True): self.assertEqual(expected.timestamp, result['timestamp']) self.assertEqual(expected.hash.hex(), result['tx_id']) @@ -353,19 +336,17 @@ def test_get_many(self): # Get newer blocks with hash reference expected5 = blocks[-2:] - expected5.reverse() - response5 = yield self.web.get( "transaction", { b'count': b'3', b'type': b'block', - b'timestamp': bytes(str(expected1[-1].timestamp), 'utf-8'), - b'hash': bytes(expected1[-1].hash.hex(), 'utf-8'), + b'timestamp': bytes(str(blocks[-3].timestamp), 'utf-8'), + b'hash': bytes(blocks[-3].hash.hex(), 'utf-8'), b'page': b'previous' }) data5 = response5.json_value() - for expected, result in zip(expected5, data5['transactions']): + for expected, result in zip(expected5, data5['transactions'], strict=True): self.assertEqual(expected.timestamp, result['timestamp']) self.assertEqual(expected.hash.hex(), result['tx_id']) @@ -373,8 +354,6 @@ def test_get_many(self): # Get txs with hash reference expected6 = txs[:8] - expected6.reverse() - response6 = yield self.web.get( "transaction", { b'count': b'8', @@ -385,7 +364,7 @@ def test_get_many(self): }) data6 = response6.json_value() - for expected, result in zip(expected6, data6['transactions']): + for expected, result in zip(reversed(expected6), data6['transactions'], strict=True): self.assertEqual(expected.timestamp, result['timestamp']) self.assertEqual(expected.hash.hex(), result['tx_id']) @@ -462,12 +441,12 @@ def test_zero_count(self): response = yield self.web.get("transaction", {b'count': b'0', b'type': b'block'}) data = response.json_value() self.assertEqual(0, len(data['transactions'])) 
"""Bound checks for the sized-integer NC types.

Each test derives the inclusive [lower, upper] range from a ``_SizedIntNCType``
subclass and cross-checks it against ``struct``'s own range enforcement for the
equivalent C format code.
"""

import struct


def _test_bounds_struct_pack(fmt: str, lower_bound: int, upper_bound: int) -> None:
    """Assert that ``struct.pack(fmt, ...)`` accepts exactly ``[lower_bound, upper_bound]``.

    Both bounds must pack without error, and one step past either bound must be
    rejected with ``struct.error``.
    """
    # The bounds themselves are representable.
    for in_range in (lower_bound, upper_bound):
        struct.pack(fmt, in_range)
    # One past either bound must be rejected by struct's range check.
    for out_of_range in (lower_bound - 1, upper_bound + 1):
        rejected = False
        try:
            struct.pack(fmt, out_of_range)
        except struct.error:
            rejected = True
        assert rejected


def test_int8_bounds() -> None:
    from hathor.nanocontracts.nc_types.sized_int_nc_type import _SizedIntNCType

    class Int8NCType(_SizedIntNCType):
        _signed = True
        _byte_size = 1

    bounds = Int8NCType._lower_bound_value(), Int8NCType._upper_bound_value()
    assert bounds == (-128, 127)
    _test_bounds_struct_pack('b', *bounds)


def test_uint8_bounds() -> None:
    from hathor.nanocontracts.nc_types.sized_int_nc_type import _SizedIntNCType

    class Uint8NCType(_SizedIntNCType):
        _signed = False
        _byte_size = 1

    bounds = Uint8NCType._lower_bound_value(), Uint8NCType._upper_bound_value()
    assert bounds == (0, 255)
    _test_bounds_struct_pack('B', *bounds)


def test_int32_bounds() -> None:
    from hathor.nanocontracts.nc_types.sized_int_nc_type import Int32NCType

    bounds = Int32NCType._lower_bound_value(), Int32NCType._upper_bound_value()
    assert bounds == (-2147483648, 2147483647)
    _test_bounds_struct_pack('i', *bounds)


def test_uint32_bounds() -> None:
    from hathor.nanocontracts.nc_types.sized_int_nc_type import Uint32NCType

    bounds = Uint32NCType._lower_bound_value(), Uint32NCType._upper_bound_value()
    assert bounds == (0, 4294967295)
    _test_bounds_struct_pack('I', *bounds)


def test_int64_bounds() -> None:
    from hathor.nanocontracts.nc_types.sized_int_nc_type import _SizedIntNCType

    class Int64NCType(_SizedIntNCType):
        _signed = True
        _byte_size = 8

    bounds = Int64NCType._lower_bound_value(), Int64NCType._upper_bound_value()
    assert bounds == (-9223372036854775808, 9223372036854775807)
    _test_bounds_struct_pack('q', *bounds)


def test_uint64_bounds() -> None:
    from hathor.nanocontracts.nc_types.sized_int_nc_type import _SizedIntNCType

    class Uint64NCType(_SizedIntNCType):
        _signed = False
        _byte_size = 8

    bounds = Uint64NCType._lower_bound_value(), Uint64NCType._upper_bound_value()
    assert bounds == (0, 18446744073709551615)
    _test_bounds_struct_pack('Q', *bounds)
+ self.simulator.run(3600, trigger=AllTriggers(stop_triggers)) for idx, node in enumerate(nodes): self.log.debug(f'checking node {idx}') diff --git a/tests/test_utils/test_leb128.py b/tests/test_utils/test_leb128.py new file mode 100644 index 000000000..4decee9a7 --- /dev/null +++ b/tests/test_utils/test_leb128.py @@ -0,0 +1,217 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import random + +import pytest + +from hathor.utils import leb128 + + +@pytest.mark.parametrize( + ['value', 'expected'], + [ + (2, bytes([2])), + (-2, bytes([0x7e])), + (63, bytes([63])), + (64, bytes([64 + 0x80, 0x00])), + (-64, bytes([64])), + (-65, bytes([0xbf, 0x7f])), + (127, bytes([127 + 0x80, 0])), + (-127, bytes([1 + 0x80, 0x7f])), + (128, bytes([0 + 0x80, 1])), + (-128, bytes([0 + 0x80, 0x7f])), + (129, bytes([1 + 0x80, 1])), + (-129, bytes([0x7f + 0x80, 0x7e])), + ], +) +def test_encode_dwarf_examples_signed(value: int, expected: bytes) -> None: + """ + Examples from the DWARF 5 standard, section 7.6, table 7.8. 
+ https://dwarfstd.org/doc/DWARF5.pdf + """ + assert leb128.encode_signed(value) == expected + + +@pytest.mark.parametrize( + ['value', 'expected'], + [ + (2, bytes([2])), + (63, bytes([63])), + (64, bytes([64])), + (127, bytes([127])), + (128, bytes([0 + 0x80, 1])), + (129, bytes([1 + 0x80, 1])), + ], +) +def test_encode_dwarf_examples_unsigned(value: int, expected: bytes) -> None: + """ + Examples from the DWARF 5 standard, section 7.6, table 7.8. + https://dwarfstd.org/doc/DWARF5.pdf + """ + assert leb128.encode_unsigned(value) == expected + + +def _assert_round_trip_signed(n: int) -> None: + assert leb128.decode_signed(leb128.encode_signed(n) + b'extra bytes') == (n, b'extra bytes'), n + + +def _assert_round_trip_unsigned(n: int) -> None: + assert leb128.decode_unsigned(leb128.encode_signed(n) + b'extra bytes') == (n, b'extra bytes'), n + + +@pytest.mark.parametrize( + ['value'], + [ + (0,), + (2,), + (-2,), + (127,), + (-127,), + (128,), + (-128,), + (129,), + (-129,), + ] +) +def test_round_trip_dwarf_examples_signed(value: int) -> None: + _assert_round_trip_signed(value) + + +@pytest.mark.parametrize( + ['value'], + [ + (0,), + (2,), + (64,), + (65,), + (127,), + (128,), + (129,), + ] +) +def test_round_trip_dwarf_examples_unsigned(value: int) -> None: + _assert_round_trip_unsigned(value) + + +def test_round_trip_edge_cases_signed() -> None: + for n_bytes in range(0, 33): + n = 8 * n_bytes + edge_cases = (-(2**n) - 1, -(2**n), 2**n - 1, 2**n) + for value in edge_cases: + _assert_round_trip_signed(value) + + +def test_round_trip_edge_cases_unsigned() -> None: + for n_bytes in range(1, 33): + n = 8 * n_bytes + edge_cases = (2**n - 1, 2**n, 2**n + 1) + for value in edge_cases: + _assert_round_trip_unsigned(value) + + +def test_round_trip_random_signed() -> None: + for _ in range(1_000_000): + n = random.randint(-(2**256) - 1, 2**256) + _assert_round_trip_signed(n) + + +def test_round_trip_random_unsigned() -> None: + for _ in range(1_000_000): + n = 
random.randint(0, 2**256) + _assert_round_trip_unsigned(n) + + +@pytest.mark.parametrize( + ['value', 'max_bytes'], + [ + (2, 0), + (-2, 0), + (63, 0), + (-64, 0), + (-65, 1), + (64, 1), + (127, 1), + (-127, 1), + (128, 1), + (-128, 1), + (129, 1), + (-129, 1), + (-8192, 1), + (8191, 1), + (8192, 2), + (-8193, 2), + ], +) +def test_encode_max_bytes_dwarf_examples_signed(value: int, max_bytes: int) -> None: + with pytest.raises(ValueError) as e: + leb128.encode_signed(value, max_bytes=max_bytes) + assert str(e.value) == f'cannot encode more than {max_bytes} bytes' + + +@pytest.mark.parametrize( + ['value', 'max_bytes'], + [ + (2, 0), + (64, 0), + (65, 0), + (127, 0), + (128, 1), + (129, 1), + (16383, 1), + (16384, 2), + ], +) +def test_encode_max_bytes_dwarf_examples_unsigned(value: int, max_bytes: int) -> None: + with pytest.raises(ValueError) as e: + leb128.encode_unsigned(value, max_bytes=max_bytes) + assert str(e.value) == f'cannot encode more than {max_bytes} bytes' + + +@pytest.mark.parametrize( + ['buf', 'max_bytes'], + [ + (bytes([2]), 0), + (bytes([0x7e]), 0), + (bytes([127 + 0x80, 0]), 1), + (bytes([1 + 0x80, 0x7f]), 1), + (bytes([0 + 0x80, 1]), 1), + (bytes([0 + 0x80, 0x7f]), 1), + (bytes([1 + 0x80, 1]), 1), + (bytes([0x7f + 0x80, 0x7e]), 1), + ], +) +def test_decode_max_bytes_dwarf_examples_signed(buf: bytes, max_bytes: int) -> None: + with pytest.raises(ValueError) as e: + leb128.decode_signed(buf, max_bytes=max_bytes) + assert str(e.value) == f'cannot decode more than {max_bytes} bytes' + + +@pytest.mark.parametrize( + ['buf', 'max_bytes'], + [ + (bytes([2]), 0), + (bytes([0x7e]), 0), + (bytes([127 + 0x80, 0]), 1), + (bytes([1 + 0x80, 0x7f]), 1), + (bytes([0 + 0x80, 1]), 1), + (bytes([0 + 0x80, 0x7f]), 1), + (bytes([1 + 0x80, 1]), 1), + (bytes([0x7f + 0x80, 0x7e]), 1), + ], +) +def test_decode_max_bytes_dwarf_examples_unsigned(buf: bytes, max_bytes: int) -> None: + with pytest.raises(ValueError) as e: + leb128.decode_unsigned(buf, max_bytes=max_bytes) 
+ assert str(e.value) == f'cannot decode more than {max_bytes} bytes' diff --git a/tests/tx/test_accumulated_weight.py b/tests/tx/test_accumulated_weight.py index 783a5b07d..79d06b0e0 100644 --- a/tests/tx/test_accumulated_weight.py +++ b/tests/tx/test_accumulated_weight.py @@ -1,5 +1,4 @@ from hathor.simulator.utils import add_new_blocks -from hathor.transaction.storage import TransactionMemoryStorage from hathor.utils.weight import weight_to_work from tests import unittest from tests.utils import add_blocks_unlock_reward, add_new_transactions @@ -8,7 +7,7 @@ class AccumulatedWeightTestCase(unittest.TestCase): def setUp(self): super().setUp() - self.tx_storage = TransactionMemoryStorage(settings=self._settings) + self.tx_storage = self.create_tx_storage() self.genesis = self.tx_storage.get_all_genesis() self.genesis_blocks = [tx for tx in self.genesis if tx.is_block] self.genesis_txs = [tx for tx in self.genesis if not tx.is_block] diff --git a/tests/tx/test_block.py b/tests/tx/test_block.py index 1e103ed2f..a22e2eb6e 100644 --- a/tests/tx/test_block.py +++ b/tests/tx/test_block.py @@ -20,19 +20,19 @@ from hathor.conf.settings import HathorSettings from hathor.feature_activation.feature import Feature from hathor.feature_activation.feature_service import BlockIsMissingSignal, BlockIsSignaling, FeatureService -from hathor.indexes import MemoryIndexesManager from hathor.transaction import Block from hathor.transaction.exceptions import BlockMustSignalError from hathor.transaction.static_metadata import BlockStaticMetadata -from hathor.transaction.storage import TransactionMemoryStorage, TransactionStorage +from hathor.transaction.storage import TransactionStorage from hathor.transaction.validation_state import ValidationState from hathor.util import not_none from hathor.verification.block_verifier import BlockVerifier +from tests.unittest import TestBuilder def test_calculate_feature_activation_bit_counts_genesis(): settings = get_global_settings() - storage = 
TransactionMemoryStorage(settings=settings) + storage = TestBuilder().build().tx_storage genesis_block = storage.get_block(settings.GENESIS_BLOCK_HASH) result = genesis_block.static_metadata.feature_activation_bit_counts @@ -41,9 +41,9 @@ def test_calculate_feature_activation_bit_counts_genesis(): @pytest.fixture def tx_storage() -> TransactionStorage: - settings = get_global_settings() - indexes = MemoryIndexesManager() - storage = TransactionMemoryStorage(indexes=indexes, settings=settings) + artifacts = TestBuilder().build() + storage = artifacts.tx_storage + indexes = not_none(artifacts.indexes) feature_activation_bits = [ 0b0000, # 0: boundary block 0b1010, diff --git a/tests/tx/test_blockchain.py b/tests/tx/test_blockchain.py index 0aaa420a6..208257817 100644 --- a/tests/tx/test_blockchain.py +++ b/tests/tx/test_blockchain.py @@ -2,7 +2,6 @@ from hathor.daa import DifficultyAdjustmentAlgorithm, TestMode from hathor.simulator.utils import add_new_blocks -from hathor.transaction.storage import TransactionMemoryStorage from hathor.utils.weight import weight_to_work from tests import unittest from tests.utils import add_new_transactions @@ -23,7 +22,7 @@ class BlockchainTestCase(unittest.TestCase): def setUp(self): super().setUp() - self.tx_storage = TransactionMemoryStorage(settings=self._settings) + self.tx_storage = self.create_tx_storage() self.genesis = self.tx_storage.get_all_genesis() self.genesis_blocks = [tx for tx in self.genesis if tx.is_block] self.genesis_txs = [tx for tx in self.genesis if not tx.is_block] diff --git a/tests/tx/test_cache_storage.py b/tests/tx/test_cache_storage.py index 789011e22..d271a00b1 100644 --- a/tests/tx/test_cache_storage.py +++ b/tests/tx/test_cache_storage.py @@ -13,7 +13,6 @@ def setUp(self): super().setUp() builder = self.get_builder() \ - .use_memory() \ .use_tx_storage_cache(capacity=5) \ .set_wallet(self._create_test_wallet(unlocked=True)) self.manager = self.create_peer_from_builder(builder) diff --git 
a/tests/tx/test_genesis.py b/tests/tx/test_genesis.py index dbb96b8f7..003da1b17 100644 --- a/tests/tx/test_genesis.py +++ b/tests/tx/test_genesis.py @@ -2,7 +2,7 @@ from hathor.conf import HathorSettings from hathor.daa import DifficultyAdjustmentAlgorithm, TestMode -from hathor.transaction.storage import TransactionMemoryStorage +from hathor.feature_activation.feature_service import FeatureService from hathor.verification.verification_service import VerificationService from hathor.verification.vertex_verifier import VertexVerifier from hathor.verification.vertex_verifiers import VertexVerifiers @@ -21,6 +21,8 @@ def get_genesis_output(): address = 'WdmDUMp8KvzhWB7KLgguA2wBiKsh4Ha8eX' elif settings.NETWORK_NAME == 'unittests': address = 'HRXVDmLVdq8pgok1BCUKpiFWdAVAy4a5AJ' + elif settings.NETWORK_NAME.startswith('nano-testnet'): + address = 'WZhKusv57pvzotZrf4s7yt7P7PXEqyFTHk' else: raise ValueError('Network unknown.') @@ -34,10 +36,11 @@ def setUp(self) -> None: self._daa = DifficultyAdjustmentAlgorithm(settings=self._settings) verifiers = VertexVerifiers.create_defaults(settings=self._settings, daa=self._daa, feature_service=Mock()) self._verification_service = VerificationService(settings=self._settings, verifiers=verifiers) - self.storage = TransactionMemoryStorage(settings=settings) + self.storage = self.create_tx_storage() def test_pow(self): - verifier = VertexVerifier(settings=self._settings) + feature_service = FeatureService(settings=self._settings, tx_storage=self.storage) + verifier = VertexVerifier(settings=self._settings, feature_service=feature_service) genesis = self.storage.get_all_genesis() for g in genesis: self.assertEqual(g.calculate_hash(), g.hash) @@ -46,7 +49,7 @@ def test_pow(self): def test_verify(self): genesis = self.storage.get_all_genesis() for g in genesis: - self._verification_service.verify_without_storage(g) + self._verification_service.verify_without_storage(g, self.verification_params) def test_output(self): # Test if block 
output is valid diff --git a/tests/tx/test_headers.py b/tests/tx/test_headers.py new file mode 100644 index 000000000..be1389118 --- /dev/null +++ b/tests/tx/test_headers.py @@ -0,0 +1,232 @@ +import pytest + +from hathor.exception import InvalidNewTransaction +from hathor.nanocontracts import Blueprint, Context, OnChainBlueprint, public +from hathor.nanocontracts.types import NCActionType +from hathor.transaction import BaseTransaction, Block, Transaction +from hathor.transaction.exceptions import HeaderNotSupported +from hathor.transaction.headers import NanoHeader, VertexBaseHeader +from hathor.transaction.headers.nano_header import ADDRESS_LEN_BYTES, NanoHeaderAction +from hathor.transaction.token_creation_tx import TokenCreationTransaction +from hathor.transaction.util import VerboseCallback +from tests import unittest +from tests.dag_builder.builder import TestDAGBuilder + + +class MyTestBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @public + def nop(self, ctx: Context) -> None: + pass + + +class FakeHeader(VertexBaseHeader): + @classmethod + def deserialize( + cls, + tx: BaseTransaction, + buf: bytes, + *, + verbose: VerboseCallback = None, + ) -> tuple[VertexBaseHeader, bytes]: + raise NotImplementedError + + def serialize(self) -> bytes: + return b'fake header' + + def get_sighash_bytes(self) -> bytes: + return b'fake sighash' + + +class VertexHeadersTest(unittest.TestCase): + def has_nano_header(self, vertex: BaseTransaction) -> bool: + for header in vertex.headers: + if isinstance(header, NanoHeader): + return True + return False + + def setUp(self) -> None: + super().setUp() + self.blueprint_id = b'x' * 32 + self.manager = self.create_peer('unittests') + self.manager.tx_storage.nc_catalog.blueprints[self.blueprint_id] = MyTestBlueprint + self.dag_builder = TestDAGBuilder.from_manager(self.manager) + + private_key = unittest.OCB_TEST_PRIVKEY.hex() + password = unittest.OCB_TEST_PASSWORD.hex() + + self.artifacts 
= self.dag_builder.build_from_str(f''' + blockchain genesis b[1..12] + b10 < dummy + + nc1.nc_id = "{self.blueprint_id.hex()}" + nc1.nc_method = initialize() + + tx1.out[0] = 5 TKA + tx2.out[0] = 3 TKB + + b12.nc_id = nc1 + b12.nc_method = nop() + + tx2.nc_id = nc1 + tx2.nc_method = nop() + + TKB.nc_id = nc1 + TKB.nc_method = nop() + + ocb1.ocb_private_key = "{private_key}" + ocb1.ocb_password = "{password}" + ocb1.ocb_code = test_blueprint1.py, TestBlueprint1 + + dummy < b11 < nc1 < TKA < tx1 < b12 < TKB < tx2 < ocb1 + ''') + self.artifacts.propagate_with(self.manager, up_to='dummy') + + self.valid_vertices: list[tuple[str, type[BaseTransaction], bool]] = [ + ('b11', Block, False), + ('nc1', Transaction, True), + ('TKA', TokenCreationTransaction, False), + ('TKB', TokenCreationTransaction, True), + ('tx1', Transaction, False), + ('ocb1', OnChainBlueprint, False), + # TODO: We should also test MergeMinedBlock, but the DAGBuilder doesn't support it yet + ] + + def test_headers_affect_hash(self) -> None: + for name, type_, is_nano in self.valid_vertices: + vertex: BaseTransaction = self.artifacts.get_typed_vertex(name, type_) + assert self.has_nano_header(vertex) == is_nano + + # Test adding a new header. 
+ msg = f'changing headers should change the hash on "{name}"' + clone = vertex.clone(include_storage=False, include_metadata=False) + assert clone.hash == clone.calculate_hash() + clone.headers.append(FakeHeader()) + assert clone.hash != clone.calculate_hash(), msg + + # Now we'll test nano header attributes, so we can skip non-nano txs + if not is_nano: + continue + + assert isinstance(vertex, Transaction) + attributes_and_new_values = [ + ('nc_id', b'123'), + ('nc_seqnum', vertex.get_nano_header().nc_seqnum + 1), + ('nc_method', 'new_method'), + ('nc_args_bytes', b'new args'), + ('nc_actions', [NanoHeaderAction(type=NCActionType.DEPOSIT, token_index=0, amount=123)]), + ('nc_address', b'\x01' * ADDRESS_LEN_BYTES), + ('nc_script', b'new script'), + ] + + # Test editing existing nano header. + for attribute, new_value in attributes_and_new_values: + clone = vertex.clone(include_storage=False, include_metadata=False) + assert clone.hash == vertex.hash + assert clone.hash == clone.calculate_hash() + setattr(clone.get_nano_header(), attribute, new_value) + assert clone.hash != clone.calculate_hash(), msg + + def test_headers_affect_sighash_all(self) -> None: + for name, type_, is_nano in self.valid_vertices: + vertex: BaseTransaction = self.artifacts.get_typed_vertex(name, type_) + assert self.has_nano_header(vertex) == is_nano + + if not isinstance(vertex, Transaction): + # only transactions have sighash + continue + + # Test adding a new header. 
+ msg = f'changing headers should change the sighash on "{name}"' + clone = vertex.clone(include_storage=False, include_metadata=False) + sighash_before = clone.get_sighash_all(skip_cache=True) + assert sighash_before == vertex.get_sighash_all(skip_cache=True) + clone.headers.append(FakeHeader()) + sighash_after = clone.get_sighash_all(skip_cache=True) + assert sighash_before != sighash_after, msg + + # Now we'll test nano header attributes, so we can skip non-nano txs + if not is_nano: + continue + + assert isinstance(vertex, Transaction) + attributes_and_new_values = [ + ('nc_id', b'123'), + ('nc_seqnum', vertex.get_nano_header().nc_seqnum + 1), + ('nc_method', 'new_method'), + ('nc_args_bytes', b'new args'), + ('nc_actions', [NanoHeaderAction(type=NCActionType.DEPOSIT, token_index=0, amount=123)]), + ('nc_address', b'\x01' * ADDRESS_LEN_BYTES), + ] + + # Test editing existing nano header. + for attribute, new_value in attributes_and_new_values: + clone = vertex.clone(include_storage=False, include_metadata=False) + sighash_before = clone.get_sighash_all(skip_cache=True) + assert sighash_before == vertex.get_sighash_all(skip_cache=True) + setattr(clone.get_nano_header(), attribute, new_value) + sighash_after = clone.get_sighash_all(skip_cache=True) + assert sighash_before != sighash_after, msg + + # Changing the nc_script does not affect sighash all. 
+ clone = vertex.clone(include_storage=False, include_metadata=False) + sighash_before = clone.get_sighash_all(skip_cache=True) + assert sighash_before == vertex.get_sighash_all(skip_cache=True) + clone.get_nano_header().nc_script = b'new script' + sighash_after = clone.get_sighash_all(skip_cache=True) + assert sighash_before == sighash_after, msg + + def test_nano_header_allowed_vertices(self) -> None: + for name, _type, should_have_nano_header in self.valid_vertices: + vertex: BaseTransaction = self.artifacts.get_typed_vertex(name, _type) + assert self.has_nano_header(vertex) == should_have_nano_header + vertex.storage = self.manager.tx_storage + clone = vertex.clone(include_metadata=False, include_storage=True) + assert bytes(clone) == bytes(vertex) + assert self.manager.on_new_tx(vertex) + + expected_to_fail: list[tuple[str, type[BaseTransaction], bool]] = [ + ('b12', Block, True), + ] + + for name, _type, should_have_nano_header in expected_to_fail: + vertex = self.artifacts.get_typed_vertex(name, _type) + assert self.has_nano_header(vertex) == should_have_nano_header + with pytest.raises(InvalidNewTransaction) as e: + self.manager.on_new_tx(vertex) + assert isinstance(e.value.__cause__, HeaderNotSupported) + + def test_nano_header_round_trip(self) -> None: + tx = Transaction() + header1 = NanoHeader( + tx=tx, + nc_id=b'1' * 32, + nc_seqnum=0, + nc_method='some_method', + nc_args_bytes=b'some args', + nc_actions=[ + NanoHeaderAction( + type=NCActionType.DEPOSIT, + token_index=0, + amount=123, + ), + ], + nc_address=b'\x01' * ADDRESS_LEN_BYTES, + nc_script=b'some script', + ) + + header1_bytes = header1.serialize() + header2, buf = NanoHeader.deserialize(tx, header1_bytes) + + assert len(buf) == 0 + assert header1_bytes == header2.serialize() + assert header1.tx is header2.tx + assert header1.nc_id == header2.nc_id + assert header1.nc_method == header2.nc_method + assert header1.nc_args_bytes == header2.nc_args_bytes + assert header1.nc_actions == 
header2.nc_actions + assert header1.nc_address == header2.nc_address + assert header1.nc_script == header2.nc_script diff --git a/tests/tx/test_indexes.py b/tests/tx/test_indexes.py index 315f87198..2bc5a382d 100644 --- a/tests/tx/test_indexes.py +++ b/tests/tx/test_indexes.py @@ -4,10 +4,10 @@ from hathor.storage.rocksdb_storage import RocksDBStorage from hathor.transaction import Transaction from hathor.transaction.vertex_parser import VertexParser -from hathor.util import iwindows +from hathor.util import initialize_hd_wallet, iwindows from hathor.wallet import Wallet from tests import unittest -from tests.utils import add_blocks_unlock_reward, add_custom_tx, add_new_tx, get_genesis_key +from tests.utils import DEFAULT_WORDS, add_blocks_unlock_reward, add_custom_tx, add_new_tx, get_genesis_key class BaseIndexesTest(unittest.TestCase): @@ -29,7 +29,7 @@ def test_tx_tips_with_conflict(self): tx1.parents = self.manager.get_new_tx_parents() tx1.timestamp = int(self.clock.seconds()) self.manager.cpu_mining_service.resolve(tx1) - self.assertTrue(self.manager.propagate_tx(tx1, False)) + self.assertTrue(self.manager.propagate_tx(tx1)) if self.manager.tx_storage.indexes.mempool_tips is not None: self.assertEqual( {tx.hash for tx in self.manager.tx_storage.indexes.mempool_tips.iter(self.manager.tx_storage)}, @@ -44,7 +44,7 @@ def test_tx_tips_with_conflict(self): self.assertIn(tx1.hash, tx2.parents) tx2.timestamp = int(self.clock.seconds()) + 1 self.manager.cpu_mining_service.resolve(tx2) - self.assertTrue(self.manager.propagate_tx(tx2, False)) + self.assertTrue(self.manager.propagate_tx(tx2)) if self.manager.tx_storage.indexes.mempool_tips is not None: self.assertEqual( {tx.hash for tx in self.manager.tx_storage.indexes.mempool_tips.iter(self.manager.tx_storage)}, @@ -56,7 +56,7 @@ def test_tx_tips_with_conflict(self): self.assertIn(tx1.hash, tx3.parents) self.manager.cpu_mining_service.resolve(tx3) self.assertNotEqual(tx2.hash, tx3.hash) - 
self.assertTrue(self.manager.propagate_tx(tx3, False)) + self.assertTrue(self.manager.propagate_tx(tx3)) self.assertIn(tx3.hash, tx2.get_metadata().conflict_with) if self.manager.tx_storage.indexes.mempool_tips is not None: self.assertEqual( @@ -86,7 +86,7 @@ def test_tx_tips_voided(self): tx1.parents = self.manager.get_new_tx_parents() tx1.timestamp = int(self.clock.seconds()) self.manager.cpu_mining_service.resolve(tx1) - self.assertTrue(self.manager.propagate_tx(tx1, False)) + self.assertTrue(self.manager.propagate_tx(tx1)) if self.manager.tx_storage.indexes.mempool_tips is not None: self.assertEqual( {tx.hash for tx in self.manager.tx_storage.indexes.mempool_tips.iter(self.manager.tx_storage)}, @@ -99,7 +99,7 @@ def test_tx_tips_voided(self): self.assertIn(tx1.hash, tx2.parents) tx2.timestamp = int(self.clock.seconds()) + 1 self.manager.cpu_mining_service.resolve(tx2) - self.assertTrue(self.manager.propagate_tx(tx2, False)) + self.assertTrue(self.manager.propagate_tx(tx2)) if self.manager.tx_storage.indexes.mempool_tips is not None: self.assertEqual( {tx.hash for tx in self.manager.tx_storage.indexes.mempool_tips.iter(self.manager.tx_storage)}, @@ -113,7 +113,7 @@ def test_tx_tips_voided(self): # self.assertIn(tx1.hash, tx3.parents) self.manager.cpu_mining_service.resolve(tx3) self.assertNotEqual(tx2.hash, tx3.hash) - self.assertTrue(self.manager.propagate_tx(tx3, False)) + self.assertTrue(self.manager.propagate_tx(tx3)) # self.assertIn(tx3.hash, tx2.get_metadata().voided_by) self.assertIn(tx3.hash, tx2.get_metadata().conflict_with) if self.manager.tx_storage.indexes.mempool_tips is not None: @@ -264,7 +264,7 @@ def check_utxos(*args): block2.timestamp = block1.timestamp block2.weight = 4 self.manager.cpu_mining_service.resolve(block2) - self.manager.propagate_tx(block2, fails_silently=False) + self.manager.propagate_tx(block2) self.graphviz.labels[block2.hash] = 'block2' # make sure a reorg did happen as expected @@ -470,7 +470,7 @@ def 
test_utxo_index_after_push_tx(self): # spend that utxo and check that it is gone from the index address1 = self.get_address(1) - wallet = self.get_wallet() + wallet = initialize_hd_wallet(DEFAULT_WORDS) tx1 = Transaction( timestamp=int(self.clock.seconds()), weight=1.0, @@ -483,7 +483,7 @@ def test_utxo_index_after_push_tx(self): *wallet.get_input_aux_data(tx1.get_sighash_all(), wallet.get_private_key(address)) ) self.manager.cpu_mining_service.resolve(tx1) - self.assertTrue(self.manager.propagate_tx(tx1, False)) + self.assertTrue(self.manager.propagate_tx(tx1)) self.assertEqual( list(utxo_index.iter_utxos(address=address, target_amount=1)), @@ -544,7 +544,7 @@ def test_utxo_index_last(self): change_value = 26 transfer_value = 6400 - change_value - wallet = self.get_wallet() + wallet = initialize_hd_wallet(DEFAULT_WORDS) tx1 = Transaction( timestamp=int(self.clock.seconds()), weight=1.0, @@ -558,7 +558,7 @@ def test_utxo_index_last(self): *wallet.get_input_aux_data(tx1.get_sighash_all(), wallet.get_private_key(address)) ) self.manager.cpu_mining_service.resolve(tx1) - self.assertTrue(self.manager.propagate_tx(tx1, False)) + self.assertTrue(self.manager.propagate_tx(tx1)) # querying for exact values @@ -687,38 +687,13 @@ def test_height_index(self): self.assertEqual(height_index.get_n_height_tips(103), height_index.get_n_height_tips(104)) -class MemoryIndexesTest(BaseIndexesTest): - __test__ = True - - def setUp(self): - from hathor.transaction.storage import TransactionMemoryStorage - - super().setUp() - self.wallet = Wallet() - self.tx_storage = TransactionMemoryStorage(settings=self._settings) - self.genesis = self.tx_storage.get_all_genesis() - self.genesis_blocks = [tx for tx in self.genesis if tx.is_block] - self.genesis_txs = [tx for tx in self.genesis if not tx.is_block] - - # read genesis keys - self.genesis_private_key = get_genesis_key() - self.genesis_public_key = self.genesis_private_key.public_key() - - # this makes sure we can spend the genesis 
outputs - self.manager = self.create_peer('testnet', tx_storage=self.tx_storage, unlock_wallet=True, wallet_index=True, - use_memory_index=True, utxo_index=True) - self.blocks = add_blocks_unlock_reward(self.manager) - self.last_block = self.blocks[-1] - - self.graphviz = GraphvizVisualizer(self.tx_storage, include_verifications=True, include_funds=True) - - class RocksDBIndexesTest(BaseIndexesTest): __test__ = True def setUp(self): import tempfile + from hathor.nanocontracts.storage import NCRocksDBStorageFactory from hathor.transaction.storage import TransactionRocksDBStorage super().setUp() @@ -727,7 +702,13 @@ def setUp(self): self.tmpdirs.append(directory) rocksdb_storage = RocksDBStorage(path=directory) parser = VertexParser(settings=self._settings) - self.tx_storage = TransactionRocksDBStorage(rocksdb_storage, settings=self._settings, vertex_parser=parser) + nc_storage_factory = NCRocksDBStorageFactory(rocksdb_storage) + self.tx_storage = TransactionRocksDBStorage( + rocksdb_storage, + settings=self._settings, + vertex_parser=parser, + nc_storage_factory=nc_storage_factory, + ) self.genesis = self.tx_storage.get_all_genesis() self.genesis_blocks = [tx for tx in self.genesis if tx.is_block] self.genesis_txs = [tx for tx in self.genesis if not tx.is_block] diff --git a/tests/tx/test_indexes2.py b/tests/tx/test_indexes2.py deleted file mode 100644 index ef4357378..000000000 --- a/tests/tx/test_indexes2.py +++ /dev/null @@ -1,73 +0,0 @@ -import tempfile -from typing import TYPE_CHECKING, NamedTuple - -from tests import unittest - -if TYPE_CHECKING: # pragma: no cover - import rocksdb - - -class FakeTransaction(NamedTuple): - hash: bytes - timestamp: int - - -# XXX: sync-bridge used but it doesn't matter, it's only used to generate a random blockchain -class SimpleIndexesTestCase(unittest.TestCase): - def setUp(self): - super().setUp() - # how many transactions will be generated on the same timestamp before increasing it by 1 - self.transactions = [] - 
repetitions = [1, 1, 10, 10, 10, 2, 1, 0, 0, 5, 5, 5, 0, 1, 1, 10, 10, 10, 1, 2, 3, 1, 100, 100, 1, 100, 0, 1] - ts = self._settings.GENESIS_BLOCK_TIMESTAMP - for rep in repetitions: - for _ in range(rep): - tx = FakeTransaction(self.rng.randbytes(32), ts) - self.transactions.append(tx) - ts += 1 - - def create_tmp_rocksdb_db(self) -> 'rocksdb.DB': - import rocksdb - directory = tempfile.mkdtemp() - self.tmpdirs.append(directory) - options = rocksdb.Options(create_if_missing=True, error_if_exists=True) - return rocksdb.DB(directory, options) - - def test_timestamp_index(self): - # setup two indexes with different backends - from hathor.indexes.memory_timestamp_index import MemoryTimestampIndex - from hathor.indexes.rocksdb_timestamp_index import RocksDBTimestampIndex - from hathor.indexes.timestamp_index import RangeIdx, ScopeType - rocksdb_index = RocksDBTimestampIndex(self.create_tmp_rocksdb_db(), scope_type=ScopeType.ALL) - memory_index = MemoryTimestampIndex(scope_type=ScopeType.ALL) - for tx in self.transactions: - rocksdb_index.add_tx(tx) - memory_index.add_tx(tx) - - # varying count so we stop at varied points - offset_variety = set() - for count in [2, 3, 5, 10, 100]: - self.log.debug('with', count=count) - idx_rocksdb = RangeIdx(0, 0) - idx_memory = RangeIdx(0, 0) - max_iters = 1000 - while max_iters > 0: - self.log.debug('iter', idx=idx_memory) - hashes_memory, idx_memory = memory_index.get_hashes_and_next_idx(idx_memory, count) - hashes_rocksdb, idx_rocksdb = rocksdb_index.get_hashes_and_next_idx(idx_rocksdb, count) - self.assertEqual(hashes_memory, hashes_rocksdb) - self.assertEqual(idx_rocksdb, idx_memory) - # XXX: we verified they're the same, doesn't matter which we pick: - idx = idx_memory - hashes = hashes_memory - self.log.debug('indexes match', idx=idx, hashes=unittest.short_hashes(hashes)) - if idx is None: - break - offset_variety.add(idx[1]) - max_iters -= 1 - else: - self.fail('took too many iterations') - - # just making sure our tests 
covered enough different cases - self.log.debug('offset variety', offsets=offset_variety) - self.assertGreater(len(offset_variety), 2, msg='too little variety of offset, not enough coverage') diff --git a/tests/tx/test_indexes4.py b/tests/tx/test_indexes4.py index 8a5a98111..8416cc21e 100644 --- a/tests/tx/test_indexes4.py +++ b/tests/tx/test_indexes4.py @@ -1,7 +1,6 @@ from hathor.crypto.util import decode_address from hathor.simulator.utils import add_new_blocks, gen_new_tx from hathor.transaction import Transaction -from hathor.transaction.storage import TransactionMemoryStorage from hathor.wallet.base_wallet import WalletOutputInfo from tests import unittest from tests.utils import add_blocks_unlock_reward @@ -9,9 +8,9 @@ class SimulatorIndexesTestCase(unittest.TestCase): def _build_randomized_blockchain(self, *, utxo_index=False): - tx_storage = TransactionMemoryStorage(settings=self._settings) + tx_storage = self.create_tx_storage() manager = self.create_peer('testnet', tx_storage=tx_storage, unlock_wallet=True, wallet_index=True, - use_memory_index=True, utxo_index=utxo_index) + utxo_index=utxo_index) add_new_blocks(manager, 50, advance_clock=15) @@ -29,7 +28,7 @@ def _build_randomized_blockchain(self, *, utxo_index=False): tx1.parents = manager.get_new_tx_parents() tx1.timestamp = int(self.clock.seconds()) manager.cpu_mining_service.resolve(tx1) - assert manager.propagate_tx(tx1, False) + assert manager.propagate_tx(tx1) tx2 = manager.wallet.prepare_transaction_compute_inputs(Transaction, outputs, manager.tx_storage) tx2.weight = 2.0 @@ -37,13 +36,13 @@ def _build_randomized_blockchain(self, *, utxo_index=False): self.assertIn(tx1.hash, tx2.parents) tx2.timestamp = int(self.clock.seconds()) + 1 manager.cpu_mining_service.resolve(tx2) - assert manager.propagate_tx(tx2, False) + assert manager.propagate_tx(tx2) tx3 = Transaction.create_from_struct(tx2.get_struct()) tx3.weight = 3.0 tx3.parents = tx1.parents manager.cpu_mining_service.resolve(tx3) - assert 
manager.propagate_tx(tx3, False) + assert manager.propagate_tx(tx3) for _ in range(100): address = self.get_address(0) @@ -53,8 +52,6 @@ def _build_randomized_blockchain(self, *, utxo_index=False): return manager def test_index_initialization(self): - from copy import deepcopy - self.manager = self._build_randomized_blockchain(utxo_index=True) # XXX: this test makes use of the internals of TipsIndex, AddressIndex and UtxoIndex @@ -74,8 +71,8 @@ def test_index_initialization(self): base_all_tips_tree = tx_storage.indexes.all_tips.tree.copy() base_block_tips_tree = tx_storage.indexes.block_tips.tree.copy() base_tx_tips_tree = tx_storage.indexes.tx_tips.tree.copy() - base_address_index = deepcopy(tx_storage.indexes.addresses.index) - base_utxo_index = deepcopy(tx_storage.indexes.utxo._index) + base_address_index = list(tx_storage.indexes.addresses.get_all_internal()) + base_utxo_index = list(tx_storage.indexes.utxo.get_all_internal()) # reset the indexes and force a re-initialization of all indexes tx_storage._manually_initialize() @@ -85,8 +82,8 @@ def test_index_initialization(self): reinit_all_tips_tree = tx_storage.indexes.all_tips.tree.copy() reinit_block_tips_tree = tx_storage.indexes.block_tips.tree.copy() reinit_tx_tips_tree = tx_storage.indexes.tx_tips.tree.copy() - reinit_address_index = deepcopy(tx_storage.indexes.addresses.index) - reinit_utxo_index = deepcopy(tx_storage.indexes.utxo._index) + reinit_address_index = list(tx_storage.indexes.addresses.get_all_internal()) + reinit_utxo_index = list(tx_storage.indexes.utxo.get_all_internal()) self.assertEqual(reinit_all_tips_tree, base_all_tips_tree) self.assertEqual(reinit_block_tips_tree, base_block_tips_tree) @@ -102,8 +99,8 @@ def test_index_initialization(self): newinit_all_tips_tree = tx_storage.indexes.all_tips.tree.copy() newinit_block_tips_tree = tx_storage.indexes.block_tips.tree.copy() newinit_tx_tips_tree = tx_storage.indexes.tx_tips.tree.copy() - newinit_address_index = 
deepcopy(tx_storage.indexes.addresses.index) - newinit_utxo_index = deepcopy(tx_storage.indexes.utxo._index) + newinit_address_index = list(tx_storage.indexes.addresses.get_all_internal()) + newinit_utxo_index = list(tx_storage.indexes.utxo.get_all_internal()) self.assertEqual(newinit_all_tips_tree, base_all_tips_tree) self.assertEqual(newinit_block_tips_tree, base_block_tips_tree) diff --git a/tests/tx/test_indexes_nc_history.py b/tests/tx/test_indexes_nc_history.py new file mode 100644 index 000000000..a45a5da89 --- /dev/null +++ b/tests/tx/test_indexes_nc_history.py @@ -0,0 +1,219 @@ +from hathor.conf import HathorSettings +from hathor.crypto.util import get_address_b58_from_bytes +from hathor.nanocontracts import Blueprint, Context, public +from hathor.nanocontracts.catalog import NCBlueprintCatalog +from hathor.nanocontracts.utils import sign_openssl +from hathor.storage.rocksdb_storage import RocksDBStorage +from hathor.transaction import Transaction +from hathor.transaction.headers import NanoHeader +from hathor.transaction.storage import TransactionRocksDBStorage +from hathor.util import not_none +from hathor.wallet import KeyPair, Wallet +from tests import unittest +from tests.dag_builder.builder import TestDAGBuilder +from tests.utils import add_blocks_unlock_reward, get_genesis_key + +settings = HathorSettings() + + +class MyTestBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @public + def nop(self, ctx: Context) -> None: + pass + + +class NCHistoryIndexesTest(unittest.TestCase): + __test__ = False + + def test_basic(self): + blueprint_id = b'x' * 32 + self.catalog = NCBlueprintCatalog({ + blueprint_id: MyTestBlueprint + }) + self.manager.tx_storage.nc_catalog = self.catalog + + parents = self.manager.get_new_tx_parents() + nc = Transaction(weight=1, inputs=[], outputs=[], parents=parents, storage=self.tx_storage) + + nc_id = blueprint_id + nc_method = 'initialize' + nc_args_bytes = b'\00' + + key = 
KeyPair.create(b'my-pass') + privkey = key.get_private_key(b'my-pass') + + nano_header = NanoHeader( + tx=nc, + nc_seqnum=0, + nc_id=nc_id, + nc_method=nc_method, + nc_args_bytes=nc_args_bytes, + nc_address=b'', + nc_script=b'', + nc_actions=[], + ) + nc.headers.append(nano_header) + + sign_openssl(nano_header, privkey) + self.manager.cpu_mining_service.resolve(nc) + + self.assertTrue(self.manager.on_new_tx(nc)) + + contract_id = nc.hash + nc_history_index = self.manager.tx_storage.indexes.nc_history + self.assertEqual( + [nc.hash], + list(nc_history_index.get_sorted_from_contract_id(contract_id)) + ) + + addresses_index = self.manager.tx_storage.indexes.addresses + address = get_address_b58_from_bytes(nano_header.nc_address) + self.assertEqual( + [nc.hash], + list(addresses_index.get_sorted_from_address(address)) + ) + + def test_latest_tx_timestamp(self) -> None: + blueprint_id = b'x' * 32 + catalog = NCBlueprintCatalog({ + blueprint_id: MyTestBlueprint + }) + manager = self.create_peer('testnet', nc_indexes=True) + nc_history_index = manager.tx_storage.indexes.nc_history + manager.tx_storage.nc_catalog = catalog + dag_builder = TestDAGBuilder.from_manager(manager) + artifacts = dag_builder.build_from_str(f''' + blockchain genesis b[1..11] + b10 < dummy + + nc1.nc_id = "{blueprint_id.hex()}" + nc1.nc_method = initialize() + + nc2.nc_id = nc1 + nc2.nc_method = nop() + + nc1 <-- nc2 <-- b11 + ''') + artifacts.propagate_with(manager) + + nc1, nc2 = artifacts.get_typed_vertices(['nc1', 'nc2'], Transaction) + + assert nc1.is_nano_contract() + assert nc2.is_nano_contract() + + assert nc_history_index.get_latest_tx_timestamp(nc1.hash) == nc2.timestamp + assert nc_history_index.get_latest_tx_timestamp(nc2.hash) is None + + def test_transaction_count(self) -> None: + builder = self.get_builder().enable_nc_indexes() + manager = self.create_peer_from_builder(builder) + assert isinstance(manager.tx_storage, TransactionRocksDBStorage) + path = 
manager.tx_storage._rocksdb_storage.path + indexes_manager = not_none(manager.tx_storage.indexes) + nc_history_index = not_none(indexes_manager.nc_history) + private_key = unittest.OCB_TEST_PRIVKEY.hex() + password = unittest.OCB_TEST_PASSWORD.hex() + dag_builder = TestDAGBuilder.from_manager(manager) + artifacts = dag_builder.build_from_str(f''' + blockchain genesis b[1..11] + b10 < dummy + + ocb1.ocb_private_key = "{private_key}" + ocb1.ocb_password = "{password}" + ocb1.ocb_code = test_blueprint1.py, TestBlueprint1 + + ocb2.ocb_private_key = "{private_key}" + ocb2.ocb_password = "{password}" + ocb2.ocb_code = test_blueprint1.py, TestBlueprint1 + + nc1.nc_id = ocb1 + nc1.nc_method = initialize(0) + + nc2.nc_id = ocb2 + nc2.nc_method = initialize(0) + + nc3.nc_id = nc2 + nc3.nc_method = nop() + + nc4.nc_id = nc1 + nc4.nc_method = nop() + + nc5.nc_id = nc2 + nc5.nc_method = nop() + + nc6.nc_id = nc2 + nc6.nc_method = nop() + + nc7.nc_id = nc1 + nc7.nc_method = nop() + + ocb1 <-- ocb2 <-- b11 + b11 < nc1 < nc2 < nc3 < nc4 < nc5 < nc6 < nc7 + ''') + + artifacts.propagate_with(manager) + nc1, nc2, nc6, nc7 = artifacts.get_typed_vertices(['nc1', 'nc2', 'nc6', 'nc7'], Transaction) + + assert nc1.is_nano_contract() + assert nc2.is_nano_contract() + assert nc6.is_nano_contract() + assert nc7.is_nano_contract() + + assert nc_history_index.get_transaction_count(nc1.hash) == 3 + assert nc_history_index.get_transaction_count(nc2.hash) == 4 + + assert isinstance(manager.tx_storage, TransactionRocksDBStorage) + manager.stop() + manager.tx_storage._rocksdb_storage.close() + + # Test loading counts from existing db + builder2 = self.get_builder().set_rocksdb_path(path).enable_nc_indexes() + manager2 = self.create_peer_from_builder(builder2) + indexes_manager2 = not_none(manager2.tx_storage.indexes) + nc_history_index = not_none(indexes_manager2.nc_history) + + assert nc_history_index.get_transaction_count(nc1.hash) == 3 + assert nc_history_index.get_transaction_count(nc2.hash) == 
4 + + +class RocksDBNCHistoryIndexesTest(NCHistoryIndexesTest): + __test__ = True + + def setUp(self): + import tempfile + + from hathor.nanocontracts.storage import NCRocksDBStorageFactory + from hathor.transaction.storage import TransactionRocksDBStorage + from hathor.transaction.vertex_parser import VertexParser + + super().setUp() + self.wallet = Wallet() + directory = tempfile.mkdtemp() + self.tmpdirs.append(directory) + rocksdb_storage = RocksDBStorage(path=directory) + vertex_parser = VertexParser(settings=self._settings) + nc_storage_factory = NCRocksDBStorageFactory(rocksdb_storage) + self.tx_storage = TransactionRocksDBStorage(rocksdb_storage, + settings=self._settings, + vertex_parser=vertex_parser, + nc_storage_factory=nc_storage_factory) + self.genesis = self.tx_storage.get_all_genesis() + self.genesis_blocks = [tx for tx in self.genesis if tx.is_block] + self.genesis_txs = [tx for tx in self.genesis if not tx.is_block] + + # read genesis keys + self.genesis_private_key = get_genesis_key() + self.genesis_public_key = self.genesis_private_key.public_key() + + # this makes sure we can spend the genesis outputs + self.manager = self.create_peer('testnet', tx_storage=self.tx_storage, unlock_wallet=True, wallet_index=True, + utxo_index=True, nc_indexes=True) + self.blocks = add_blocks_unlock_reward(self.manager) + self.last_block = self.blocks[-1] + + from hathor.graphviz import GraphvizVisualizer + self.graphviz = GraphvizVisualizer(self.tx_storage, include_verifications=True, include_funds=True) diff --git a/tests/tx/test_mining.py b/tests/tx/test_mining.py index 17d77450d..0e62acd30 100644 --- a/tests/tx/test_mining.py +++ b/tests/tx/test_mining.py @@ -3,7 +3,6 @@ from hathor.mining import BlockTemplate from hathor.simulator.utils import add_new_blocks from hathor.transaction import Block -from hathor.transaction.storage import TransactionMemoryStorage from hathor.utils.weight import weight_to_work from tests import unittest @@ -23,7 +22,7 @@ class 
MiningTest(unittest.TestCase): def setUp(self): super().setUp() - self.tx_storage = TransactionMemoryStorage(settings=self._settings) + self.tx_storage = self.create_tx_storage() self.genesis = self.tx_storage.get_all_genesis() self.genesis_blocks = [tx for tx in self.genesis if tx.is_block] self.genesis_txs = [tx for tx in self.genesis if not tx.is_block] diff --git a/tests/tx/test_multisig.py b/tests/tx/test_multisig.py index 25222b90d..82ac152d9 100644 --- a/tests/tx/test_multisig.py +++ b/tests/tx/test_multisig.py @@ -1,6 +1,7 @@ import base58 from hathor.crypto.util import decode_address, get_private_key_from_bytes, get_public_key_bytes_compressed +from hathor.exception import InvalidNewTransaction from hathor.simulator.utils import add_new_blocks from hathor.transaction import Transaction, TxInput, TxOutput from hathor.transaction.exceptions import ScriptError @@ -102,7 +103,8 @@ def test_spend_multisig(self): self.manager.cpu_mining_service.resolve(tx) # Transaction is still locked - self.assertFalse(self.manager.propagate_tx(tx)) + with self.assertRaises(InvalidNewTransaction): + self.manager.propagate_tx(tx) self.clock.advance(6) tx.timestamp = int(self.clock.seconds()) @@ -116,7 +118,8 @@ def test_spend_multisig(self): tx2 = Transaction.create_from_struct(tx.get_struct()) tx2.inputs[0].data = p2pkh_input_data self.manager.cpu_mining_service.resolve(tx2) - self.assertFalse(self.manager.propagate_tx(tx2)) + with self.assertRaises(InvalidNewTransaction): + self.manager.propagate_tx(tx2) # Now we propagate the correct self.assertTrue(self.manager.propagate_tx(tx)) diff --git a/tests/tx/test_reward_lock.py b/tests/tx/test_reward_lock.py index 55f062f56..9c5115ef7 100644 --- a/tests/tx/test_reward_lock.py +++ b/tests/tx/test_reward_lock.py @@ -7,7 +7,6 @@ from hathor.transaction import Block, Transaction, TxInput, TxOutput from hathor.transaction.exceptions import RewardLocked from hathor.transaction.scripts import P2PKH -from hathor.transaction.storage import 
TransactionMemoryStorage from hathor.wallet import Wallet from tests import unittest from tests.utils import add_blocks_unlock_reward, get_genesis_key @@ -23,7 +22,7 @@ def setUp(self): self.genesis_public_key = self.genesis_private_key.public_key() # this makes sure we can spend the genesis outputs - self.tx_storage = TransactionMemoryStorage(settings=self._settings) + self.tx_storage = self.create_tx_storage() self.genesis = self.tx_storage.get_all_genesis() self.genesis_blocks = [tx for tx in self.genesis if tx.is_block] self.genesis_txs = [tx for tx in self.genesis if not tx.is_block] @@ -73,13 +72,13 @@ def test_classic_reward_lock(self) -> None: tx, _ = self._spend_reward_tx(self.manager, reward_block) self.assertEqual(tx.static_metadata.min_height, unlock_height) with self.assertRaises(RewardLocked): - self.manager.verification_service.verify(tx) + self.manager.verification_service.verify(tx, self.verification_params) add_new_blocks(self.manager, 1, advance_clock=1) # now it should be spendable tx, _ = self._spend_reward_tx(self.manager, reward_block) self.assertEqual(tx.static_metadata.min_height, unlock_height) - self.assertTrue(self.manager.propagate_tx(tx, fails_silently=False)) + self.assertTrue(self.manager.propagate_tx(tx)) def test_block_with_not_enough_height(self) -> None: # add block with a reward we can spend @@ -93,7 +92,7 @@ def test_block_with_not_enough_height(self) -> None: # transaction before it can the RewardLocked exception is raised tx, _ = self._spend_reward_tx(self.manager, reward_block) self.assertEqual(tx.static_metadata.min_height, unlock_height) - self.assertTrue(self.manager.on_new_tx(tx, fails_silently=False, reject_locked_reward=False)) + self.assertTrue(self.manager.on_new_tx(tx, reject_locked_reward=False)) # new block will try to confirm it and fail with pytest.raises(InvalidNewTransaction) as e: @@ -115,7 +114,7 @@ def test_block_with_enough_height(self) -> None: # add tx that spends the reward tx, _ = 
self._spend_reward_tx(self.manager, reward_block) self.assertEqual(tx.static_metadata.min_height, unlock_height) - self.assertTrue(self.manager.on_new_tx(tx, fails_silently=False)) + self.assertTrue(self.manager.on_new_tx(tx)) # new block will be able to confirm it add_new_blocks(self.manager, 1, advance_clock=1) @@ -133,9 +132,9 @@ def test_mempool_tx_with_not_enough_height(self) -> None: tx, _ = self._spend_reward_tx(self.manager, reward_block) self.assertEqual(tx.static_metadata.min_height, unlock_height) with self.assertRaises(RewardLocked): - self.manager.verification_service.verify(tx) + self.manager.verification_service.verify(tx, self.verification_params) with self.assertRaises(InvalidNewTransaction): - self.assertTrue(self.manager.on_new_tx(tx, fails_silently=False)) + self.assertTrue(self.manager.on_new_tx(tx)) def test_mempool_tx_with_enough_height(self) -> None: # add block with a reward we can spend @@ -147,7 +146,7 @@ def test_mempool_tx_with_enough_height(self) -> None: # add tx that spends the reward, must not fail tx, _ = self._spend_reward_tx(self.manager, reward_block) self.assertEqual(tx.static_metadata.min_height, unlock_height) - self.assertTrue(self.manager.on_new_tx(tx, fails_silently=False)) + self.assertTrue(self.manager.on_new_tx(tx)) def test_mempool_tx_invalid_after_reorg(self) -> None: # add block with a reward we can spend @@ -161,7 +160,7 @@ def test_mempool_tx_invalid_after_reorg(self) -> None: balance_per_address = self.manager.wallet.get_balance_per_address(self._settings.HATHOR_TOKEN_UID) assert tx_address not in balance_per_address self.assertEqual(tx.static_metadata.min_height, unlock_height) - self.assertTrue(self.manager.on_new_tx(tx, fails_silently=False)) + self.assertTrue(self.manager.on_new_tx(tx)) balance_per_address = self.manager.wallet.get_balance_per_address(self._settings.HATHOR_TOKEN_UID) assert balance_per_address[tx_address] == 6400 @@ -171,12 +170,12 @@ def test_mempool_tx_invalid_after_reorg(self) -> None: b0 = 
tb0.generate_mining_block(self.manager.rng, storage=self.manager.tx_storage) b0.weight = 10 self.manager.cpu_mining_service.resolve(b0) - self.manager.propagate_tx(b0, fails_silently=False) + self.manager.propagate_tx(b0) self.clock.advance(1) # now the new tx should not pass verification considering the reward lock with self.assertRaises(RewardLocked): - self.manager.verification_service.verify(tx) + self.manager.verification_service.verify(tx, self.verification_params) # the transaction should have been removed from the mempool self.assertNotIn(tx, self.manager.tx_storage.iter_mempool_from_best_index()) @@ -216,4 +215,4 @@ def test_classic_reward_lock_timestamp_expected_to_fail(self) -> None: self.manager.cpu_mining_service.resolve(tx) self.assertEqual(tx.static_metadata.min_height, unlock_height) with self.assertRaises(RewardLocked): - self.manager.verification_service.verify(tx) + self.manager.verification_service.verify(tx, self.verification_params) diff --git a/tests/tx/test_scripts.py b/tests/tx/test_scripts.py index 34ce6ac25..89cbe5183 100644 --- a/tests/tx/test_scripts.py +++ b/tests/tx/test_scripts.py @@ -18,17 +18,11 @@ TimeLocked, VerifyFailed, ) -from hathor.transaction.scripts import ( - P2PKH, - HathorScript, - MultiSig, - Opcode, - ScriptExtras, - create_base_script, - create_output_script, -) -from hathor.transaction.scripts.construct import count_sigops, get_pushdata, get_sigops_count, re_compile +from hathor.transaction.scripts import P2PKH, HathorScript, MultiSig, Opcode, create_base_script, create_output_script +from hathor.transaction.scripts.construct import SigopCounter, get_pushdata, re_compile from hathor.transaction.scripts.execute import ( + Stack, + UtxoScriptExtras, binary_to_int, decode_opn, evaluate_final_stack, @@ -56,16 +50,16 @@ op_pushdata1, ) from hathor.transaction.scripts.script_context import ScriptContext -from hathor.transaction.storage import TransactionMemoryStorage from hathor.wallet import HDWallet from tests import 
unittest from tests.utils import BURN_ADDRESS, get_genesis_key class TestScripts(unittest.TestCase): - def setUp(self): + def setUp(self) -> None: super().setUp() - tx_storage = TransactionMemoryStorage(settings=self._settings) + tx_storage = self.create_tx_storage() + self.genesis_blocks = [tx for tx in tx_storage.get_all_genesis() if tx.is_block] self.genesis_txs = [tx for tx in tx_storage.get_all_genesis() if not tx.is_block] @@ -73,7 +67,17 @@ def setUp(self): self.genesis_private_key = get_genesis_key() self.genesis_public_key = self.genesis_private_key.public_key() - def test_data_pattern(self): + # force checkdatasig count to be enabled + self.counter = SigopCounter( + max_multisig_pubkeys=self._settings.MAX_MULTISIG_PUBKEYS, + enable_checkdatasig_count=True, + ) + self.counter_old = SigopCounter( + max_multisig_pubkeys=self._settings.MAX_MULTISIG_PUBKEYS, + enable_checkdatasig_count=False, + ) + + def test_data_pattern(self) -> None: # up to 75 bytes, no Opcode is needed s = HathorScript() re_match = re_compile('^DATA_75$') @@ -117,7 +121,7 @@ def test_data_pattern(self): match = re_match.search(s.data) self.assertIsNone(match) - def test_push_integers(self): + def test_push_integers(self) -> None: # 1 byte s = HathorScript() s.pushData(255) @@ -146,8 +150,8 @@ def test_push_integers(self): self.assertEqual(8, len(n)) self.assertEqual(4294967296, binary_to_int(n)) - def test_pushdata(self): - stack = [] + def test_pushdata(self) -> None: + stack: Stack = [] random_bytes = b'a' * 50 s = HathorScript() s.pushData(random_bytes) @@ -159,8 +163,8 @@ def test_pushdata(self): with self.assertRaises(OutOfData): op_pushdata(0, s.data[:-1], stack) - def test_pushdata1(self): - stack = [] + def test_pushdata1(self) -> None: + stack: Stack = [] random_bytes = b'a' * 100 s = HathorScript() s.pushData(random_bytes) @@ -174,15 +178,15 @@ def test_pushdata1(self): with self.assertRaises(OutOfData): op_pushdata1(0, s.data[:-1], stack) - def test_dup(self): + def 
test_dup(self) -> None: with self.assertRaises(MissingStackItems): op_dup(ScriptContext(stack=[], logs=[], extras=Mock())) - stack = [1] + stack: Stack = [1] op_dup(ScriptContext(stack=stack, logs=[], extras=Mock())) self.assertEqual(stack[-1], stack[-2]) - def test_equalverify(self): + def test_equalverify(self) -> None: elem = b'a' with self.assertRaises(MissingStackItems): op_equalverify(ScriptContext(stack=[elem], logs=[], extras=Mock())) @@ -193,7 +197,7 @@ def test_equalverify(self): with self.assertRaises(EqualVerifyFailed): op_equalverify(ScriptContext(stack=[elem, b'aaaa'], logs=[], extras=Mock())) - def test_checksig_raise_on_uncompressed_pubkey(self): + def test_checksig_raise_on_uncompressed_pubkey(self) -> None: """ Uncompressed pubkeys shoud not be accepted, even if they solve the signature """ block = self.genesis_blocks[0] @@ -219,7 +223,7 @@ def test_checksig_raise_on_uncompressed_pubkey(self): with self.assertRaises(ScriptError): op_checksig(ScriptContext(stack=[signature, pubkey_uncompressed], logs=[], extras=Mock())) - def test_checksig_check_for_compressed_pubkey(self): + def test_checksig_check_for_compressed_pubkey(self) -> None: """ Compressed pubkeys bytes representation always start with a byte 2 or 3 - test for invalid bytes starting with bytes 2 and 3 - test for bytes not starting with byte 2 or 3 @@ -236,7 +240,7 @@ def test_checksig_check_for_compressed_pubkey(self): with self.assertRaises(ScriptError): op_checksig(ScriptContext(stack=[b'\x0423', b'\x0423'], logs=[], extras=Mock())) - def test_checksig(self): + def test_checksig(self) -> None: with self.assertRaises(MissingStackItems): op_checksig(ScriptContext(stack=[1], logs=[], extras=Mock())) @@ -253,10 +257,10 @@ def test_checksig(self): signature = self.genesis_private_key.sign(hashed_data, ec.ECDSA(hashes.SHA256())) pubkey_bytes = get_public_key_bytes_compressed(self.genesis_public_key) - extras = ScriptExtras(tx=tx, txin=Mock(), spent_tx=Mock()) + extras = 
UtxoScriptExtras(tx=tx, txin=Mock(), spent_tx=Mock()) # wrong signature puts False (0) on stack - stack = [b'aaaaaaaaa', pubkey_bytes] + stack: Stack = [b'aaaaaaaaa', pubkey_bytes] op_checksig(ScriptContext(stack=stack, logs=[], extras=extras)) self.assertEqual(0, stack.pop()) @@ -264,7 +268,7 @@ def test_checksig(self): op_checksig(ScriptContext(stack=stack, logs=[], extras=extras)) self.assertEqual(1, stack.pop()) - def test_checksig_cache(self): + def test_checksig_cache(self) -> None: block = self.genesis_blocks[0] from hathor.transaction import Transaction, TxInput, TxOutput @@ -278,25 +282,25 @@ def test_checksig_cache(self): signature = self.genesis_private_key.sign(hashed_data, ec.ECDSA(hashes.SHA256())) pubkey_bytes = get_public_key_bytes_compressed(self.genesis_public_key) - extras = ScriptExtras(tx=tx, txin=Mock(), spent_tx=Mock()) + extras = UtxoScriptExtras(tx=tx, txin=Mock(), spent_tx=Mock()) - stack = [signature, pubkey_bytes] + stack: Stack = [signature, pubkey_bytes] self.assertIsNone(tx._sighash_data_cache) op_checksig(ScriptContext(stack=stack, logs=[], extras=extras)) self.assertIsNotNone(tx._sighash_data_cache) self.assertEqual(1, stack.pop()) - def test_hash160(self): + def test_hash160(self) -> None: with self.assertRaises(MissingStackItems): op_hash160(ScriptContext(stack=[], logs=[], extras=Mock())) elem = b'aaaaaaaa' hash160 = get_hash160(elem) - stack = [elem] + stack: Stack = [elem] op_hash160(ScriptContext(stack=stack, logs=[], extras=Mock())) self.assertEqual(hash160, stack.pop()) - def test_checkdatasig_raise_on_uncompressed_pubkey(self): + def test_checkdatasig_raise_on_uncompressed_pubkey(self) -> None: block = self.genesis_blocks[0] data = b'some_random_data' @@ -314,7 +318,7 @@ def test_checkdatasig_raise_on_uncompressed_pubkey(self): pubkey_uncompressed = self.genesis_public_key.public_bytes(Encoding.X962, PublicFormat.UncompressedPoint) # ScriptError if pubkey is not a valid compressed public key # with wrong signature - stack = 
[data, b'123', pubkey_uncompressed] + stack: Stack = [data, b'123', pubkey_uncompressed] with self.assertRaises(ScriptError): op_checkdatasig(ScriptContext(stack=stack, logs=[], extras=Mock())) # or with rigth one @@ -323,7 +327,7 @@ def test_checkdatasig_raise_on_uncompressed_pubkey(self): with self.assertRaises(ScriptError): op_checkdatasig(ScriptContext(stack=stack, logs=[], extras=Mock())) - def test_checkdatasig_check_for_compressed_pubkey(self): + def test_checkdatasig_check_for_compressed_pubkey(self) -> None: # ScriptError if pubkey is not a public key but starts with 2 or 3 with self.assertRaises(ScriptError): op_checkdatasig(ScriptContext(stack=[b'\x0233', b'\x0233', b'\x0233'], logs=[], extras=Mock())) @@ -334,7 +338,7 @@ def test_checkdatasig_check_for_compressed_pubkey(self): with self.assertRaises(ScriptError): op_checkdatasig(ScriptContext(stack=[b'\x0123', b'\x0123', b'\x0123'], logs=[], extras=Mock())) - def test_checkdatasig(self): + def test_checkdatasig(self) -> None: with self.assertRaises(MissingStackItems): op_checkdatasig(ScriptContext(stack=[1, 1], logs=[], extras=Mock())) @@ -342,7 +346,7 @@ def test_checkdatasig(self): signature = self.genesis_private_key.sign(data, ec.ECDSA(hashes.SHA256())) pubkey_bytes = get_public_key_bytes_compressed(self.genesis_public_key) - stack = [data, signature, pubkey_bytes] + stack: Stack = [data, signature, pubkey_bytes] # no exception should be raised and data is left on stack op_checkdatasig(ScriptContext(stack=stack, logs=[], extras=Mock())) self.assertEqual(data, stack.pop()) @@ -351,7 +355,7 @@ def test_checkdatasig(self): with self.assertRaises(OracleChecksigFailed): op_checkdatasig(ScriptContext(stack=stack, logs=[], extras=Mock())) - def test_get_data_value(self): + def test_get_data_value(self) -> None: value0 = b'value0' value1 = b'vvvalue1' value2 = b'vvvvvalue2' @@ -368,7 +372,7 @@ def test_get_data_value(self): with self.assertRaises(OutOfData): get_data_value(2, data[:-1]) - def 
test_data_strequal(self): + def test_data_strequal(self) -> None: with self.assertRaises(MissingStackItems): op_data_strequal(ScriptContext(stack=[1, 1], logs=[], extras=Mock())) @@ -378,7 +382,7 @@ def test_data_strequal(self): data = (bytes([len(value0)]) + value0 + bytes([len(value1)]) + value1 + bytes([len(value2)]) + value2) - stack = [data, 0, value0] + stack: Stack = [data, 0, value0] op_data_strequal(ScriptContext(stack=stack, logs=[], extras=Mock())) self.assertEqual(stack.pop(), data) @@ -390,7 +394,7 @@ def test_data_strequal(self): with self.assertRaises(VerifyFailed): op_data_strequal(ScriptContext(stack=stack, logs=[], extras=Mock())) - def test_data_greaterthan(self): + def test_data_greaterthan(self) -> None: with self.assertRaises(MissingStackItems): op_data_greaterthan(ScriptContext(stack=[1, 1], logs=[], extras=Mock())) @@ -399,7 +403,7 @@ def test_data_greaterthan(self): data = (bytes([len(value0)]) + value0 + bytes([len(value1)]) + value1) - stack = [data, 0, struct.pack('!I', 999)] + stack: Stack = [data, 0, struct.pack('!I', 999)] op_data_greaterthan(ScriptContext(stack=stack, logs=[], extras=Mock())) self.assertEqual(stack.pop(), data) @@ -419,14 +423,14 @@ def test_data_greaterthan(self): with self.assertRaises(VerifyFailed): op_data_greaterthan(ScriptContext(stack=stack, logs=[], extras=Mock())) - def test_data_match_interval(self): + def test_data_match_interval(self) -> None: with self.assertRaises(MissingStackItems): op_data_match_interval([1, b'2']) value0 = struct.pack('!I', 1000) data = (bytes([len(value0)]) + value0) - stack = [data, 0, 'key1', struct.pack('!I', 1000), 'key2', struct.pack('!I', 1005), 'key3', bytes([2])] + stack: Stack = [data, 0, 'key1', struct.pack('!I', 1000), 'key2', struct.pack('!I', 1005), 'key3', bytes([2])] op_data_match_interval(stack) self.assertEqual(stack.pop(), 'key1') self.assertEqual(len(stack), 0) @@ -451,14 +455,14 @@ def test_data_match_interval(self): with self.assertRaises(VerifyFailed): 
op_data_match_interval(stack) - def test_data_match_value(self): + def test_data_match_value(self) -> None: with self.assertRaises(MissingStackItems): op_data_match_value(ScriptContext(stack=[1, b'2'], logs=[], extras=Mock())) value0 = struct.pack('!I', 1000) data = (bytes([len(value0)]) + value0) - stack = [data, 0, 'key1', struct.pack('!I', 1000), 'key2', struct.pack('!I', 1005), 'key3', bytes([2])] + stack: Stack = [data, 0, 'key1', struct.pack('!I', 1000), 'key2', struct.pack('!I', 1005), 'key3', bytes([2])] op_data_match_value(ScriptContext(stack=stack, logs=[], extras=Mock())) self.assertEqual(stack.pop(), 'key2') self.assertEqual(len(stack), 0) @@ -484,7 +488,7 @@ def test_data_match_value(self): with self.assertRaises(VerifyFailed): op_data_match_value(ScriptContext(stack=stack, logs=[], extras=Mock())) - def test_find_p2pkh(self): + def test_find_p2pkh(self) -> None: with self.assertRaises(MissingStackItems): op_find_p2pkh(ScriptContext(stack=[], logs=[], extras=Mock())) @@ -506,34 +510,34 @@ def test_find_p2pkh(self): txin = TxInput(b'dont_care', 0, b'data') # try with just 1 output - stack = [genesis_address] + stack: Stack = [genesis_address] tx = Transaction(outputs=[TxOutput(1, out_genesis)]) - extras = ScriptExtras(tx=tx, txin=txin, spent_tx=spent_tx) + extras = UtxoScriptExtras(tx=tx, txin=txin, spent_tx=spent_tx) op_find_p2pkh(ScriptContext(stack=stack, logs=[], extras=extras)) self.assertEqual(stack.pop(), 1) # several outputs and correct output among them stack = [genesis_address] tx = Transaction(outputs=[TxOutput(1, out1), TxOutput(1, out2), TxOutput(1, out_genesis), TxOutput(1, out3)]) - extras = ScriptExtras(tx=tx, txin=txin, spent_tx=spent_tx) + extras = UtxoScriptExtras(tx=tx, txin=txin, spent_tx=spent_tx) op_find_p2pkh(ScriptContext(stack=stack, logs=[], extras=extras)) self.assertEqual(stack.pop(), 1) # several outputs without correct amount output stack = [genesis_address] tx = Transaction(outputs=[TxOutput(1, out1), TxOutput(1, out2), 
TxOutput(2, out_genesis), TxOutput(1, out3)]) - extras = ScriptExtras(tx=tx, txin=txin, spent_tx=spent_tx) + extras = UtxoScriptExtras(tx=tx, txin=txin, spent_tx=spent_tx) with self.assertRaises(VerifyFailed): op_find_p2pkh(ScriptContext(stack=stack, logs=[], extras=extras)) # several outputs without correct address output stack = [genesis_address] tx = Transaction(outputs=[TxOutput(1, out1), TxOutput(1, out2), TxOutput(1, out3)]) - extras = ScriptExtras(tx=tx, txin=txin, spent_tx=spent_tx) + extras = UtxoScriptExtras(tx=tx, txin=txin, spent_tx=spent_tx) with self.assertRaises(VerifyFailed): op_find_p2pkh(ScriptContext(stack=stack, logs=[], extras=extras)) - def test_greaterthan_timestamp(self): + def test_greaterthan_timestamp(self) -> None: with self.assertRaises(MissingStackItems): op_greaterthan_timestamp(ScriptContext(stack=[], logs=[], extras=Mock())) @@ -542,8 +546,8 @@ def test_greaterthan_timestamp(self): from hathor.transaction import Transaction tx = Transaction() - stack = [struct.pack('!I', timestamp)] - extras = ScriptExtras(tx=tx, txin=Mock(), spent_tx=Mock()) + stack: Stack = [struct.pack('!I', timestamp)] + extras = UtxoScriptExtras(tx=tx, txin=Mock(), spent_tx=Mock()) with self.assertRaises(TimeLocked): tx.timestamp = timestamp - 1 @@ -557,7 +561,7 @@ def test_greaterthan_timestamp(self): op_greaterthan_timestamp(ScriptContext(stack=stack, logs=[], extras=extras)) self.assertEqual(len(stack), 0) - def test_checkmultisig(self): + def test_checkmultisig(self) -> None: with self.assertRaises(MissingStackItems): op_checkmultisig(ScriptContext(stack=[], logs=[], extras=Mock())) @@ -569,10 +573,11 @@ def test_checkmultisig(self): tx = Transaction(inputs=[txin], outputs=[txout]) data_to_sign = tx.get_sighash_all() - extras = ScriptExtras(tx=tx, txin=Mock(), spent_tx=Mock()) + extras = UtxoScriptExtras(tx=tx, txin=Mock(), spent_tx=Mock()) wallet = HDWallet() wallet._manually_initialize() + assert wallet.mnemonic is not None wallet.words = 
wallet.mnemonic.generate() wallet._manually_initialize() @@ -595,7 +600,7 @@ def test_checkmultisig(self): } # All signatures match - stack = [ + stack: Stack = [ keys[0]['signature'], keys[2]['signature'], 2, keys[0]['pubkey'], keys[1]['pubkey'], keys[2]['pubkey'], 3 ] op_checkmultisig(ScriptContext(stack=stack, logs=[], extras=extras)) @@ -672,13 +677,13 @@ def test_checkmultisig(self): with self.assertRaises(MissingStackItems): op_checkmultisig(ScriptContext(stack=stack, logs=[], extras=extras)) - def test_equal(self): + def test_equal(self) -> None: elem = b'a' with self.assertRaises(MissingStackItems): op_equal(ScriptContext(stack=[elem], logs=[], extras=Mock())) # no exception should be raised - stack = [elem, elem] + stack: Stack = [elem, elem] op_equal(ScriptContext(stack=stack, logs=[], extras=Mock())) self.assertEqual(stack.pop(), 1) @@ -686,7 +691,9 @@ def test_equal(self): op_equal(ScriptContext(stack=stack, logs=[], extras=Mock())) self.assertEqual(stack.pop(), 0) - def test_integer_opcode(self): + def test_integer_opcode(self) -> None: + stack: Stack + # We have opcodes from OP_0 to OP_16 for i in range(0, 17): stack = [] @@ -700,7 +707,7 @@ def test_integer_opcode(self): with self.assertRaises(ScriptError): op_integer(0x61, stack) - def test_decode_opn(self): + def test_decode_opn(self) -> None: for i in range(0, 17): n = decode_opn(getattr(Opcode, 'OP_{}'.format(i))) self.assertEqual(n, i) @@ -711,9 +718,9 @@ def test_decode_opn(self): with self.assertRaises(InvalidScriptError): _ = decode_opn(0x61) - def test_final_stack(self): + def test_final_stack(self) -> None: # empty stack is invalid - stack = [] + stack: Stack = [] with self.assertRaises(FinalStackInvalid): evaluate_final_stack(stack, []) @@ -734,17 +741,17 @@ def test_final_stack(self): with self.assertRaises(FinalStackInvalid): evaluate_final_stack(stack, []) - def test_get_pushdata(self): + def test_get_pushdata(self) -> None: s = [0] * 10 s.insert(0, len(s)) - self.assertEqual(10, 
len(get_pushdata(s))) + self.assertEqual(10, len(get_pushdata(bytes(s)))) s = [0] * 100 s.insert(0, len(s)) s.insert(0, Opcode.OP_PUSHDATA1) - self.assertEqual(100, len(get_pushdata(s))) + self.assertEqual(100, len(get_pushdata(bytes(s)))) - def test_p2pkh_base_script(self): + def test_p2pkh_base_script(self) -> None: import base58 addrs = [ @@ -762,7 +769,7 @@ def test_p2pkh_base_script(self): script2 = create_output_script(baddress) self.assertEqual(script2, script.get_script()) - def test_multisig_base_script(self): + def test_multisig_base_script(self) -> None: import base58 addrs = [ @@ -779,7 +786,7 @@ def test_multisig_base_script(self): script2 = create_output_script(baddress) self.assertEqual(script2, script.get_script()) - def test_get_data_bytes(self): + def test_get_data_bytes(self) -> None: value0 = b'value0' value1 = b'vvvalue1' value2 = b'vvvvvalue2' @@ -813,7 +820,7 @@ def test_get_data_bytes(self): with self.assertRaises(OutOfData): get_data_bytes(-1, 1, data0) - def test_get_data_single_byte(self): + def test_get_data_single_byte(self) -> None: """ - return data in `int` if success - OutOfData in case position > data_len @@ -831,7 +838,7 @@ def test_get_data_single_byte(self): with self.assertRaises(OutOfData): get_data_single_byte(-1, data) - def test_get_script_op(self): + def test_get_script_op(self) -> None: """ - pushdata, pushdata1, OP_N, OP_X - OutOfData in case pos > data_len (tested in get_data_single_byte?) 
@@ -892,7 +899,7 @@ def test_get_script_op(self): # test for pushdata stack pos = i = 0 - stack = [] + stack: Stack = [] while pos < len(data1): opcode, pos = get_script_op(pos, data1, stack) self.assertEqual(stack.pop(), solution1[i]) @@ -923,31 +930,38 @@ def test_get_script_op(self): pos = len(data0) + 1 get_script_op(pos, data0, None) - def test_count_sigops(self): + def test_count_sigops(self) -> None: script_0 = HathorScript() script_1 = HathorScript() script_10 = HathorScript() script_100 = HathorScript() script_0.addOpcode(Opcode.OP_0) - self.assertEqual(count_sigops(script_0.data), 0) + self.assertEqual(self.counter.count_sigops(script_0.data), 0) # script_1.addOpcode(Opcode.OP_10) script_1.addOpcode(Opcode.OP_CHECKSIG) - self.assertEqual(count_sigops(script_1.data), 1) + self.assertEqual(self.counter.count_sigops(script_1.data), 1) # script_10.addOpcode(Opcode.OP_10) script_10.addOpcode(Opcode.OP_CHECKMULTISIG) - self.assertEqual(count_sigops(script_10.data), 10) + self.assertEqual(self.counter.count_sigops(script_10.data), 10) # for i in range(6): script_100.addOpcode(Opcode.OP_16) script_100.addOpcode(Opcode.OP_CHECKMULTISIG) for i in range(4): script_100.addOpcode(Opcode.OP_CHECKSIG) - self.assertEqual(count_sigops(script_100.data), 100) + self.assertEqual(self.counter.count_sigops(script_100.data), 100) + + def test_count_checkdatasigops(self) -> None: + script = HathorScript() + for i in range(10): + script.addOpcode(Opcode.OP_CHECKDATASIG) + self.assertEqual(self.counter.count_sigops(script.data), 10) + self.assertEqual(self.counter_old.count_sigops(script.data), 0) - def test_get_sigops_count(self): + def test_get_sigops_count(self) -> None: multisig_script = MultiSig.create_output_script(BURN_ADDRESS) p2pkh_script = P2PKH.create_output_script(BURN_ADDRESS) @@ -961,8 +975,8 @@ def test_get_sigops_count(self): input_script.pushData(redeem_script.data) # include redeem_script if output is MultiSig - 
self.assertEqual(get_sigops_count(input_script.data, multisig_script), 10) + self.assertEqual(self.counter.get_sigops_count(input_script.data, multisig_script), 10) # if output is not MultiSig, count only input - self.assertEqual(get_sigops_count(input_script.data, p2pkh_script), 1) + self.assertEqual(self.counter.get_sigops_count(input_script.data, p2pkh_script), 1) # if no output_script, count only input - self.assertEqual(get_sigops_count(input_script.data), 1) + self.assertEqual(self.counter.get_sigops_count(input_script.data), 1) diff --git a/tests/tx/test_stratum.py b/tests/tx/test_stratum.py index 6a2811111..6c7060479 100644 --- a/tests/tx/test_stratum.py +++ b/tests/tx/test_stratum.py @@ -21,7 +21,6 @@ StratumFactory, ) from hathor.transaction.block import Block -from hathor.transaction.storage import TransactionMemoryStorage from tests import unittest @@ -227,13 +226,13 @@ def test_min_share_weight(self): class StratumClientTest(unittest.TestCase): def setUp(self): super().setUp() - storage = TransactionMemoryStorage(settings=self._settings) + storage = self.create_tx_storage() self.block = storage.get_transaction(self._settings.GENESIS_BLOCK_HASH) self.transport = StringTransportWithDisconnection() self.protocol = StratumClient(reactor=self.clock) self.protocol.makeConnection(self.transport) self.job_request_params = { - 'data': self.block.get_header_without_nonce().hex(), + 'data': self.block.get_mining_header_without_nonce().hex(), 'job_id': 'a734d03fe4b64739be2894742f3de20f', 'nonce_size': Block.SERIALIZATION_NONCE_SIZE, 'weight': self.block.weight, diff --git a/tests/tx/test_timelock.py b/tests/tx/test_timelock.py index 638da9038..cf829cf2a 100644 --- a/tests/tx/test_timelock.py +++ b/tests/tx/test_timelock.py @@ -1,4 +1,5 @@ from hathor.crypto.util import decode_address +from hathor.exception import InvalidNewTransaction from hathor.simulator.utils import add_new_blocks from hathor.transaction import Transaction from hathor.wallet.base_wallet import 
WalletBalance, WalletInputInfo, WalletOutputInfo @@ -57,11 +58,11 @@ def test_timelock(self): tx2.parents = self.manager.get_new_tx_parents() tx2.timestamp = int(self.clock.seconds()) self.manager.cpu_mining_service.resolve(tx2) - propagated = self.manager.propagate_tx(tx2) + with self.assertRaises(InvalidNewTransaction): + self.manager.propagate_tx(tx2) self.assertEqual(self.manager.wallet.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(500, sum(blocks_tokens) - 500)) - self.assertFalse(propagated) self.clock.advance(1) @@ -77,7 +78,7 @@ def test_timelock(self): tx3.parents = self.manager.get_new_tx_parents() tx3.timestamp = int(self.clock.seconds()) self.manager.cpu_mining_service.resolve(tx3) - propagated = self.manager.propagate_tx(tx3, False) + propagated = self.manager.propagate_tx(tx3) self.assertEqual(self.manager.wallet.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(500, sum(blocks_tokens) - 500 - 700)) self.assertTrue(propagated) @@ -97,7 +98,7 @@ def test_timelock(self): tx4.parents = self.manager.get_new_tx_parents() tx4.timestamp = int(self.clock.seconds()) self.manager.cpu_mining_service.resolve(tx4) - propagated = self.manager.propagate_tx(tx4, False) + propagated = self.manager.propagate_tx(tx4) self.assertEqual(self.manager.wallet.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(500, sum(blocks_tokens[:3]))) self.assertTrue(propagated) @@ -105,7 +106,7 @@ def test_timelock(self): self.clock.advance(8) tx2.timestamp = int(self.clock.seconds()) self.manager.cpu_mining_service.resolve(tx2) - propagated = self.manager.propagate_tx(tx2, False) + propagated = self.manager.propagate_tx(tx2) self.assertEqual(self.manager.wallet.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(0, sum(blocks_tokens[:3]))) self.assertTrue(propagated) diff --git a/tests/tx/test_tips.py b/tests/tx/test_tips.py index d3f99ef70..b4520bbba 100644 --- a/tests/tx/test_tips.py +++ b/tests/tx/test_tips.py @@ -65,7 +65,7 @@ def test_tips_winner(self): 
new_block = add_new_block(self.manager, propagate=False) new_block.parents = [new_block.parents[0], tx1.hash, tx3.hash] self.manager.cpu_mining_service.resolve(new_block) - self.manager.propagate_tx(new_block, fails_silently=False) + self.manager.propagate_tx(new_block) self.manager.reactor.advance(10) @@ -133,7 +133,7 @@ def test_tips_twin(self): tx4 = add_new_transactions(self.manager, 1, advance_clock=1, propagate=False)[0] tx4.parents = [tx1.hash, tx2.hash] self.manager.cpu_mining_service.resolve(tx4) - self.manager.propagate_tx(tx4, fails_silently=False) + self.manager.propagate_tx(tx4) self.manager.reactor.advance(10) self.assertCountEqual(self.get_tips(), set([tx4.hash, tx3.hash])) @@ -153,7 +153,7 @@ def test_tips_twin(self): tx6 = add_new_transactions(self.manager, 1, advance_clock=1, propagate=False)[0] tx6.parents = [tx5.hash, tx2.hash] self.manager.cpu_mining_service.resolve(tx6) - self.manager.propagate_tx(tx6, fails_silently=False) + self.manager.propagate_tx(tx6) self.manager.reactor.advance(10) self.assertIsNotNone(tx4.get_metadata(force_reload=True).voided_by) self.assertIsNone(tx5.get_metadata(force_reload=True).voided_by) diff --git a/tests/tx/test_token_validation.py b/tests/tx/test_token_validation.py new file mode 100644 index 000000000..5dd1573e6 --- /dev/null +++ b/tests/tx/test_token_validation.py @@ -0,0 +1,43 @@ + +import pytest + +from hathor.conf.get_settings import get_global_settings +from hathor.transaction.exceptions import TransactionDataError +from hathor.transaction.util import validate_token_name_and_symbol + + +def test_token_name(): + settings = get_global_settings() + + validate_token_name_and_symbol(settings, 'TOKEN', 'TKN') + validate_token_name_and_symbol(settings, 'TOKEN', 'X') + validate_token_name_and_symbol(settings, 'TOKEN', 'XY') + validate_token_name_and_symbol(settings, 'TOKEN', 'XYZ') + validate_token_name_and_symbol(settings, 'TOKEN', 'XYZW') + + with pytest.raises(TransactionDataError): + 
validate_token_name_and_symbol(settings, '', 'X') + + with pytest.raises(TransactionDataError): + validate_token_name_and_symbol(settings, 'TOKEN', '') + + with pytest.raises(TransactionDataError): + validate_token_name_and_symbol(settings, 'HATHOR', 'X') + + with pytest.raises(TransactionDataError): + validate_token_name_and_symbol(settings, ' HATHOR', 'X') + + with pytest.raises(TransactionDataError): + validate_token_name_and_symbol(settings, ' HATHOR ', 'X') + + with pytest.raises(TransactionDataError): + validate_token_name_and_symbol(settings, 'HATHOR ', 'X') + + with pytest.raises(TransactionDataError): + validate_token_name_and_symbol(settings, 'TOKEN', 'HTR') + + with pytest.raises(TransactionDataError): + validate_token_name_and_symbol(settings, 'TOKEN', ' HTR') + + with pytest.raises(TransactionDataError): + validate_token_name_and_symbol(settings, 'TOKEN', 'HTR ') diff --git a/tests/tx/test_tokens.py b/tests/tx/test_tokens.py index 602938692..858503637 100644 --- a/tests/tx/test_tokens.py +++ b/tests/tx/test_tokens.py @@ -50,7 +50,7 @@ def test_tokens_in_block(self): self.manager.cpu_mining_service.resolve(block) with self.assertRaises(BlockWithTokensError): - self.manager.verification_service.verify(block) + self.manager.verification_service.verify(block, self.verification_params) def test_tx_token_outputs(self): genesis_block = self.genesis_blocks[0] @@ -70,7 +70,7 @@ def test_tx_token_outputs(self): tx.inputs[0].data = P2PKH.create_input_data(public_bytes, signature) self.manager.cpu_mining_service.resolve(tx) with self.assertRaises(InvalidToken): - self.manager.verification_service.verify(tx) + self.manager.verification_service.verify(tx, self.verification_params) # with 1 token uid in list tx.tokens = [bytes.fromhex('0023be91834c973d6a6ddd1a0ae411807b7c8ef2a015afb5177ee64b666ce602')] @@ -80,7 +80,7 @@ def test_tx_token_outputs(self): tx.inputs[0].data = P2PKH.create_input_data(public_bytes, signature) self.manager.cpu_mining_service.resolve(tx) 
with self.assertRaises(InvalidToken): - self.manager.verification_service.verify(tx) + self.manager.verification_service.verify(tx, self.verification_params) # try hathor authority UTXO output = TxOutput(value, script, 0b10000000) @@ -90,7 +90,7 @@ def test_tx_token_outputs(self): tx.inputs[0].data = P2PKH.create_input_data(public_bytes, signature) self.manager.cpu_mining_service.resolve(tx) with self.assertRaises(InvalidToken): - self.manager.verification_service.verify(tx) + self.manager.verification_service.verify(tx, self.verification_params) def test_token_transfer(self): wallet = self.manager.wallet @@ -111,7 +111,7 @@ def test_token_transfer(self): tx2.inputs[0].data = P2PKH.create_input_data(public_bytes, signature) self.manager.cpu_mining_service.resolve(tx2) tx2.init_static_metadata_from_storage(self._settings, self.manager.tx_storage) - self.manager.verification_service.verify(tx2) + self.manager.verification_service.verify(tx2, self.verification_params) # missing tokens token_output = TxOutput(utxo.value - 1, script, 1) @@ -121,8 +121,9 @@ def test_token_transfer(self): public_bytes, signature = wallet.get_input_aux_data(data_to_sign, wallet.get_private_key(self.address_b58)) tx3.inputs[0].data = P2PKH.create_input_data(public_bytes, signature) self.manager.cpu_mining_service.resolve(tx3) + tx3.init_static_metadata_from_storage(self._settings, self.manager.tx_storage) with self.assertRaises(InputOutputMismatch): - self.manager.verification_service.verify(tx3) + self.manager.verification_service.verify(tx3, self.verification_params) def test_token_mint(self): wallet = self.manager.wallet @@ -189,7 +190,7 @@ def test_token_mint(self): tx3.inputs[0].data = data self.manager.cpu_mining_service.resolve(tx3) with self.assertRaises(InputOutputMismatch): - self.manager.verification_service.verify(tx3) + self.manager.verification_service.verify(tx3, self.verification_params) # try to mint and deposit less tokens than necessary mint_amount = 10000000 @@ -215,7 
+216,7 @@ def test_token_mint(self): tx4.inputs[1].data = data self.manager.cpu_mining_service.resolve(tx4) with self.assertRaises(InputOutputMismatch): - self.manager.verification_service.verify(tx4) + self.manager.verification_service.verify(tx4, self.verification_params) # try to mint using melt authority UTXO _input1 = TxInput(tx.hash, 2, b'') @@ -227,7 +228,7 @@ def test_token_mint(self): tx5.inputs[0].data = P2PKH.create_input_data(public_bytes, signature) self.manager.cpu_mining_service.resolve(tx5) with self.assertRaises(InputOutputMismatch): - self.manager.verification_service.verify(tx5) + self.manager.verification_service.verify(tx5, self.verification_params) def test_token_melt(self): wallet = self.manager.wallet @@ -299,7 +300,7 @@ def test_token_melt(self): tx3.inputs[1].data = data self.manager.cpu_mining_service.resolve(tx3) with self.assertRaises(InputOutputMismatch): - self.manager.verification_service.verify(tx3) + self.manager.verification_service.verify(tx3, self.verification_params) # try to melt using mint authority UTXO _input1 = TxInput(tx.hash, 0, b'') @@ -314,7 +315,7 @@ def test_token_melt(self): tx4.inputs[1].data = data self.manager.cpu_mining_service.resolve(tx4) with self.assertRaises(InputOutputMismatch): - self.manager.verification_service.verify(tx4) + self.manager.verification_service.verify(tx4, self.verification_params) def test_token_transfer_authority(self): wallet = self.manager.wallet @@ -333,7 +334,7 @@ def test_token_transfer_authority(self): tx2.inputs[0].data = P2PKH.create_input_data(public_bytes, signature) self.manager.cpu_mining_service.resolve(tx2) with self.assertRaises(InvalidToken): - self.manager.verification_service.verify(tx2) + self.manager.verification_service.verify(tx2, self.verification_params) # input with melt and output with mint _input1 = TxInput(tx.hash, 2, b'') @@ -345,7 +346,7 @@ def test_token_transfer_authority(self): tx3.inputs[0].data = P2PKH.create_input_data(public_bytes, signature) 
self.manager.cpu_mining_service.resolve(tx3) with self.assertRaises(InvalidToken): - self.manager.verification_service.verify(tx3) + self.manager.verification_service.verify(tx3, self.verification_params) def test_token_index_with_conflict(self, mint_amount=0): # create a new token and have a mint operation done. The tx that mints the @@ -449,39 +450,39 @@ def update_tx(tx): # max token name length tx.token_name = 'a' * self._settings.MAX_LENGTH_TOKEN_NAME update_tx(tx) - self.manager.verification_service.verify(tx) + self.manager.verification_service.verify(tx, self.verification_params) # max token symbol length tx.token_symbol = 'a' * self._settings.MAX_LENGTH_TOKEN_SYMBOL update_tx(tx) - self.manager.verification_service.verify(tx) + self.manager.verification_service.verify(tx, self.verification_params) # long token name tx.token_name = 'a' * (self._settings.MAX_LENGTH_TOKEN_NAME + 1) update_tx(tx) with self.assertRaises(TransactionDataError): - self.manager.verification_service.verify(tx) + self.manager.verification_service.verify(tx, self.verification_params) # long token symbol tx.token_name = 'ValidName' tx.token_symbol = 'a' * (self._settings.MAX_LENGTH_TOKEN_SYMBOL + 1) update_tx(tx) with self.assertRaises(TransactionDataError): - self.manager.verification_service.verify(tx) + self.manager.verification_service.verify(tx, self.verification_params) # Hathor token name tx.token_name = self._settings.HATHOR_TOKEN_NAME tx.token_symbol = 'TST' update_tx(tx) with self.assertRaises(TransactionDataError): - self.manager.verification_service.verify(tx) + self.manager.verification_service.verify(tx, self.verification_params) # Hathor token symbol tx.token_name = 'Test' tx.token_symbol = self._settings.HATHOR_TOKEN_SYMBOL update_tx(tx) with self.assertRaises(TransactionDataError): - self.manager.verification_service.verify(tx) + self.manager.verification_service.verify(tx, self.verification_params) # Token name unicode tx.token_name = 'Test ∞' @@ -489,7 +490,7 @@ def 
update_tx(tx): token_info = tx.serialize_token_info() TokenCreationTransaction.deserialize_token_info(token_info) update_tx(tx) - self.manager.verification_service.verify(tx) + self.manager.verification_service.verify(tx, self.verification_params) # Token symbol unicode tx.token_name = 'Test Token' @@ -497,7 +498,7 @@ def update_tx(tx): token_info = tx.serialize_token_info() TokenCreationTransaction.deserialize_token_info(token_info) update_tx(tx) - self.manager.verification_service.verify(tx) + self.manager.verification_service.verify(tx, self.verification_params) def test_token_mint_zero(self): # try to mint 0 tokens @@ -538,7 +539,7 @@ def test_unknown_authority(self): tx2.inputs[1].data = data self.manager.cpu_mining_service.resolve(tx2) with self.assertRaises(InvalidToken): - self.manager.verification_service.verify(tx2) + self.manager.verification_service.verify(tx2, self.verification_params) def test_token_info_serialization(self): tx = create_tokens(self.manager, self.address_b58, mint_amount=500) @@ -591,7 +592,7 @@ def test_block_with_htr_authority(self): self.manager.cpu_mining_service.resolve(block) with self.assertRaises(InvalidToken): - self.manager.verification_service.verify(block) + self.manager.verification_service.verify(block, self.verification_params) def test_voided_token_creation(self): tx1 = create_tokens(self.manager, self.address_b58, mint_amount=500, use_genesis=False) @@ -614,8 +615,3 @@ def test_voided_token_creation(self): self.assertEqual(1, len(melt)) tokens_index = self.manager.tx_storage.indexes.tokens.get_token_info(token_uid) print(tokens_index) - - -@pytest.mark.skipif(unittest.USE_MEMORY_STORAGE, reason='previous tests already use memory, avoid duplication') -class MemoryTokenTest(TokenTest): - use_memory_storage = True diff --git a/tests/tx/test_tx.py b/tests/tx/test_tx.py index 6c842d656..5e10e6dcd 100644 --- a/tests/tx/test_tx.py +++ b/tests/tx/test_tx.py @@ -23,10 +23,10 @@ InvalidInputDataSize, InvalidOutputScriptSize, 
InvalidOutputValue, - NoInputError, ParentDoesNotExist, PowError, TimestampError, + TooFewInputs, TooManyInputs, TooManyOutputs, TooManySigOps, @@ -47,7 +47,7 @@ def setUp(self): self.wallet = Wallet() # this makes sure we can spend the genesis outputs - self.manager = self.create_peer('testnet', unlock_wallet=True, wallet_index=True, use_memory_storage=True) + self.manager = self.create_peer('testnet', unlock_wallet=True, wallet_index=True) self._verifiers = self.manager.verification_service.verifiers self.tx_storage = self.manager.tx_storage @@ -135,7 +135,7 @@ def test_too_many_inputs(self): def test_no_inputs(self): tx = Transaction(inputs=[], storage=self.tx_storage) - with self.assertRaises(NoInputError): + with self.assertRaises(TooFewInputs): self._verifiers.tx.verify_number_of_inputs(tx) def test_too_many_outputs(self): @@ -219,7 +219,7 @@ def test_block_inputs(self): self.manager.cpu_mining_service.resolve(block) with self.assertRaises(BlockWithInputs): - self.manager.verification_service.verify(block) + self.manager.verification_service.verify(block, self.verification_params) def test_merge_mined_no_magic(self): from hathor.merged_mining import MAGIC_NUMBER @@ -290,7 +290,7 @@ def test_merge_mined_multiple_magic(self): storage=self.tx_storage, ) - assert b1.get_base_hash() != b2.get_base_hash() + assert b1.get_mining_base_hash() != b2.get_mining_base_hash() header_head = b'\x00' * 32 header_tail = b'\x00' * 12 @@ -298,9 +298,9 @@ def test_merge_mined_multiple_magic(self): coinbase_parts = [ b'\x00' * 42, MAGIC_NUMBER, - b1.get_base_hash(), + b1.get_mining_base_hash(), MAGIC_NUMBER, - b2.get_base_hash(), + b2.get_mining_base_hash(), b'\x00' * 18, ] @@ -435,22 +435,23 @@ def test_tx_number_parents(self): # in first test, only with 1 parent self.manager.cpu_mining_service.resolve(tx) + tx.init_static_metadata_from_storage(self._settings, self.manager.tx_storage) with self.assertRaises(IncorrectParents): - self.manager.verification_service.verify(tx) + 
self.manager.verification_service.verify(tx, self.verification_params) # test with 3 parents parents = [tx.hash for tx in self.genesis] tx.parents = parents self.manager.cpu_mining_service.resolve(tx) with self.assertRaises(IncorrectParents): - self.manager.verification_service.verify(tx) + self.manager.verification_service.verify(tx, self.verification_params) # 2 parents, 1 tx and 1 block parents = [self.genesis_txs[0].hash, self.genesis_blocks[0].hash] tx.parents = parents self.manager.cpu_mining_service.resolve(tx) with self.assertRaises(IncorrectParents): - self.manager.verification_service.verify(tx) + self.manager.verification_service.verify(tx, self.verification_params) def test_block_unknown_parent(self): address = get_address_from_public_key(self.genesis_public_key) @@ -469,7 +470,7 @@ def test_block_unknown_parent(self): self.manager.cpu_mining_service.resolve(block) with self.assertRaises(ParentDoesNotExist): - self.manager.verification_service.verify(block) + self.manager.verification_service.verify(block, self.verification_params) def test_block_number_parents(self): address = get_address_from_public_key(self.genesis_public_key) @@ -487,7 +488,7 @@ def test_block_number_parents(self): self.manager.cpu_mining_service.resolve(block) with self.assertRaises(IncorrectParents): - self.manager.verification_service.verify(block) + self.manager.verification_service.verify(block, self.verification_params) def test_tx_inputs_out_of_range(self): # we'll try to spend output 3 from genesis transaction, which does not exist @@ -510,7 +511,7 @@ def test_tx_inputs_out_of_range(self): # test with an inexistent index self.manager.cpu_mining_service.resolve(tx) with self.assertRaises(InexistentInput): - self.manager.verification_service.verify(tx) + self.manager.verification_service.verify(tx, self.verification_params) # now with index equals of len of outputs _input = [TxInput(genesis_block.hash, len(genesis_block.outputs), data)] @@ -518,7 +519,7 @@ def 
test_tx_inputs_out_of_range(self): # test with an inexistent index self.manager.cpu_mining_service.resolve(tx) with self.assertRaises(InexistentInput): - self.manager.verification_service.verify(tx) + self.manager.verification_service.verify(tx, self.verification_params) # now with inexistent tx hash random_bytes = bytes.fromhex('0000184e64683b966b4268f387c269915cc61f6af5329823a93e3696cb0fe902') @@ -526,7 +527,7 @@ def test_tx_inputs_out_of_range(self): tx.inputs = _input self.manager.cpu_mining_service.resolve(tx) with self.assertRaises(InexistentInput): - self.manager.verification_service.verify(tx) + self.manager.verification_service.verify(tx, self.verification_params) def test_tx_inputs_conflict(self): # the new tx inputs will try to spend the same output @@ -549,7 +550,7 @@ def test_tx_inputs_conflict(self): self.manager.cpu_mining_service.resolve(tx) with self.assertRaises(ConflictingInputs): - self.manager.verification_service.verify(tx) + self.manager.verification_service.verify(tx, self.verification_params) def test_regular_tx(self): # this should succeed @@ -571,7 +572,7 @@ def test_regular_tx(self): self.manager.cpu_mining_service.resolve(tx) tx.init_static_metadata_from_storage(self._settings, self.manager.tx_storage) - self.manager.verification_service.verify(tx) + self.manager.verification_service.verify(tx, self.verification_params) def test_tx_weight_too_high(self): parents = [tx.hash for tx in self.genesis_txs] @@ -606,7 +607,7 @@ def test_weight_nan(self): tx.update_hash() self.assertTrue(isnan(tx.weight)) with self.assertRaises(WeightError): - self.manager.verification_service.verify(tx) + self.manager.verification_service.verify(tx, self.verification_params) def test_weight_inf(self): # this should succeed @@ -629,7 +630,7 @@ def test_weight_inf(self): tx.update_hash() self.assertTrue(isinf(tx.weight)) with self.assertRaises(WeightError): - self.manager.verification_service.verify(tx) + self.manager.verification_service.verify(tx, 
self.verification_params) def test_tx_duplicated_parents(self): # the new tx will confirm the same tx twice @@ -650,8 +651,9 @@ def test_tx_duplicated_parents(self): _input.data = P2PKH.create_input_data(public_bytes, signature) self.manager.cpu_mining_service.resolve(tx) + tx.init_static_metadata_from_storage(self._settings, self.manager.tx_storage) with self.assertRaises(DuplicatedParents): - self.manager.verification_service.verify(tx) + self.manager.verification_service.verify(tx, self.verification_params) def test_update_timestamp(self): parents = [tx for tx in self.genesis_txs] @@ -684,26 +686,30 @@ def test_propagation_error(self): # 1. propagate genesis genesis_block = self.genesis_blocks[0] genesis_block.storage = manager.tx_storage - self.assertFalse(manager.propagate_tx(genesis_block)) + with self.assertRaises(InvalidNewTransaction): + manager.propagate_tx(genesis_block) # 2. propagate block with weight 1 block = manager.generate_mining_block() block.weight = 1 self.manager.cpu_mining_service.resolve(block) - self.assertFalse(manager.propagate_tx(block)) + with self.assertRaises(InvalidNewTransaction): + manager.propagate_tx(block) # 3. propagate block with wrong amount of tokens block = manager.generate_mining_block() output = TxOutput(1, block.outputs[0].script) block.outputs = [output] self.manager.cpu_mining_service.resolve(block) - self.assertFalse(manager.propagate_tx(block)) + with self.assertRaises(InvalidNewTransaction): + manager.propagate_tx(block) # 4. 
propagate block from the future block = manager.generate_mining_block() block.timestamp = int(self.clock.seconds()) + self._settings.MAX_FUTURE_TIMESTAMP_ALLOWED + 100 manager.cpu_mining_service.resolve(block, update_time=False) - self.assertFalse(manager.propagate_tx(block)) + with self.assertRaises(InvalidNewTransaction): + manager.propagate_tx(block) def test_tx_methods(self): blocks = add_new_blocks(self.manager, 2, advance_clock=1) @@ -789,21 +795,17 @@ def add_block_with_data(data: bytes = b'') -> None: assert isinstance(e.value.__cause__, TransactionDataError) def test_output_serialization(self): - from hathor.transaction.base_transaction import ( - _MAX_OUTPUT_VALUE_32, - MAX_OUTPUT_VALUE, - bytes_to_output_value, - output_value_to_bytes, - ) - max_32 = output_value_to_bytes(_MAX_OUTPUT_VALUE_32) + from hathor.serialization.encoding.output_value import MAX_OUTPUT_VALUE_32 + from hathor.transaction.base_transaction import MAX_OUTPUT_VALUE, bytes_to_output_value, output_value_to_bytes + max_32 = output_value_to_bytes(MAX_OUTPUT_VALUE_32) self.assertEqual(len(max_32), 4) value, buf = bytes_to_output_value(max_32) - self.assertEqual(value, _MAX_OUTPUT_VALUE_32) + self.assertEqual(value, MAX_OUTPUT_VALUE_32) - over_32 = output_value_to_bytes(_MAX_OUTPUT_VALUE_32 + 1) + over_32 = output_value_to_bytes(MAX_OUTPUT_VALUE_32 + 1) self.assertEqual(len(over_32), 8) value, buf = bytes_to_output_value(over_32) - self.assertEqual(value, _MAX_OUTPUT_VALUE_32 + 1) + self.assertEqual(value, MAX_OUTPUT_VALUE_32 + 1) max_64 = output_value_to_bytes(MAX_OUTPUT_VALUE) self.assertEqual(len(max_64), 8) @@ -859,7 +861,7 @@ def test_output_value(self): # 'Manually resolving', to validate verify method tx.hash = bytes.fromhex('012cba011be3c29f1c406f9015e42698b97169dbc6652d1f5e4d5c5e83138858') with self.assertRaises(InvalidOutputValue): - self.manager.verification_service.verify(tx) + self.manager.verification_service.verify(tx, self.verification_params) # Invalid output value 
invalid_output = bytes.fromhex('ffffffff') @@ -1095,7 +1097,7 @@ def test_sigops_output_single_above_limit(self) -> None: tx.update_hash() # This calls verify to ensure that verify_sigops_output is being called on verify with self.assertRaises(TooManySigOps): - self.manager.verification_service.verify(tx) + self.manager.verification_service.verify(tx, self.verification_params) def test_sigops_output_multi_above_limit(self) -> None: genesis_block = self.genesis_blocks[0] @@ -1108,7 +1110,7 @@ def test_sigops_output_multi_above_limit(self) -> None: tx = Transaction(inputs=[_input], outputs=[output2]*num_outputs, storage=self.tx_storage) tx.update_hash() with self.assertRaises(TooManySigOps): - self.manager.verification_service.verify(tx) + self.manager.verification_service.verify(tx, self.verification_params) def test_sigops_output_single_below_limit(self) -> None: genesis_block = self.genesis_blocks[0] @@ -1145,7 +1147,7 @@ def test_sigops_input_single_above_limit(self) -> None: tx = Transaction(inputs=[input1], outputs=[_output], storage=self.tx_storage) tx.update_hash() with self.assertRaises(TooManySigOps): - self.manager.verification_service.verify(tx) + self.manager.verification_service.verify(tx, self.verification_params) def test_sigops_input_multi_above_limit(self) -> None: genesis_block = self.genesis_blocks[0] @@ -1160,7 +1162,7 @@ def test_sigops_input_multi_above_limit(self) -> None: tx = Transaction(inputs=[input2]*num_inputs, outputs=[_output], storage=self.tx_storage) tx.update_hash() with self.assertRaises(TooManySigOps): - self.manager.verification_service.verify(tx) + self.manager.verification_service.verify(tx, self.verification_params) def test_sigops_input_single_below_limit(self) -> None: genesis_block = self.genesis_blocks[0] diff --git a/tests/tx/test_tx_deserialization.py b/tests/tx/test_tx_deserialization.py index f467c26f0..6f38f5641 100644 --- a/tests/tx/test_tx_deserialization.py +++ b/tests/tx/test_tx_deserialization.py @@ -29,7 +29,7 
@@ def verbose(key, value): cls = self.get_tx_class() tx = cls.create_from_struct(self.tx_bytes, verbose=verbose) - self._verification_service.verify_without_storage(tx) + self._verification_service.verify_without_storage(tx, self.verification_params) key, version = v[1] self.assertEqual(key, 'version') diff --git a/tests/tx/test_tx_storage.py b/tests/tx/test_tx_storage.py index ee45c4c5e..5eb5bd51e 100644 --- a/tests/tx/test_tx_storage.py +++ b/tests/tx/test_tx_storage.py @@ -13,6 +13,7 @@ from hathor.transaction.scripts import P2PKH from hathor.transaction.storage.exceptions import TransactionDoesNotExist from hathor.transaction.validation_state import ValidationState +from hathor.verification.verification_params import VerificationParams from tests.unittest import TestBuilder from tests.utils import BURN_ADDRESS, add_blocks_unlock_reward, add_new_transactions, add_new_tx, create_tokens @@ -36,6 +37,7 @@ def setUp(self): self.manager = artifacts.manager self.tx_storage = artifacts.tx_storage self._settings = artifacts.settings + self.verification_params = VerificationParams.default_for_mempool() assert artifacts.wallet is not None @@ -55,7 +57,7 @@ def setUp(self): nonce=100781, storage=self.tx_storage) self.manager.cpu_mining_service.resolve(self.block) self.block.init_static_metadata_from_storage(self._settings, self.tx_storage) - self.manager.verification_service.verify(self.block) + self.manager.verification_service.verify(self.block, self.verification_params) self.block.get_metadata().validation = ValidationState.FULL tx_parents = [tx.hash for tx in self.genesis_txs] @@ -96,7 +98,7 @@ def test_genesis(self): self.assertEqual(1, len(self.genesis_blocks)) self.assertEqual(2, len(self.genesis_txs)) for tx in self.genesis: - self.manager.verification_service.verify(tx) + self.manager.verification_service.verify(tx, self.verification_params) for tx in self.genesis: tx2 = self.tx_storage.get_transaction(tx.hash) @@ -513,7 +515,7 @@ def _add_new_block(self, 
parents=None): block.parents = parents block.weight = 10 self.assertTrue(self.manager.cpu_mining_service.resolve(block)) - self.manager.propagate_tx(block, fails_silently=False) + self.manager.propagate_tx(block) self.reactor.advance(5) return block @@ -568,13 +570,6 @@ def handle_error(err): yield gatherResults(deferreds) self.tx_storage._disable_weakref() - def test_full_verification_attribute(self): - self.assertFalse(self.tx_storage.is_running_full_verification()) - self.tx_storage.start_full_verification() - self.assertTrue(self.tx_storage.is_running_full_verification()) - self.tx_storage.finish_full_verification() - self.assertFalse(self.tx_storage.is_running_full_verification()) - def test_key_value_attribute(self): attr = 'test' val = 'a' @@ -606,31 +601,11 @@ def _test_remove_tx_or_block(self, tx): self.assertFalse(self.tx_storage.store.transaction_exists(tx_hash)) -class TransactionMemoryStorageTest(BaseTransactionStorageTest): - __test__ = True - - def _config_builder(self, builder: TestBuilder) -> None: - builder.use_memory() - - -class CacheMemoryStorageTest(BaseCacheStorageTest): - __test__ = True - - def _config_builder(self, builder: TestBuilder) -> None: - builder.use_memory() - builder.use_tx_storage_cache(capacity=5) - - class TransactionRocksDBStorageTest(BaseTransactionStorageTest): __test__ = True def _config_builder(self, builder: TestBuilder) -> None: - self.directory = tempfile.mkdtemp() - builder.use_rocksdb(self.directory) - - def tearDown(self): - shutil.rmtree(self.directory) - super().tearDown() + pass def test_storage_new_blocks(self): self.tx_storage._always_use_topological_dfs = True @@ -641,10 +616,4 @@ class CacheRocksDBStorageTest(BaseCacheStorageTest): __test__ = True def _config_builder(self, builder: TestBuilder) -> None: - self.directory = tempfile.mkdtemp() - builder.use_rocksdb(self.directory) builder.use_tx_storage_cache(capacity=5) - - def tearDown(self): - shutil.rmtree(self.directory) - super().tearDown() diff --git 
a/tests/tx/test_verification.py b/tests/tx/test_verification.py index 15a6fa3f7..3db56c05e 100644 --- a/tests/tx/test_verification.py +++ b/tests/tx/test_verification.py @@ -113,20 +113,20 @@ def _get_valid_token_creation_tx(self) -> TokenCreationTransaction: def test_block_verify_basic(self) -> None: block = self._get_valid_block() - verify_version_wrapped = Mock(wraps=self.verifiers.vertex.verify_version) + verify_version_basic_wrapped = Mock(wraps=self.verifiers.vertex.verify_version_basic) verify_weight_wrapped = Mock(wraps=self.verifiers.block.verify_weight) verify_reward_wrapped = Mock(wraps=self.verifiers.block.verify_reward) with ( - patch.object(VertexVerifier, 'verify_version', verify_version_wrapped), + patch.object(VertexVerifier, 'verify_version_basic', verify_version_basic_wrapped), patch.object(BlockVerifier, 'verify_weight', verify_weight_wrapped), patch.object(BlockVerifier, 'verify_reward', verify_reward_wrapped), ): - self.manager.verification_service.verify_basic(block) + self.manager.verification_service.verify_basic(block, self.verification_params) # Vertex methods - verify_version_wrapped.assert_called_once() + verify_version_basic_wrapped.assert_called_once() # Block methods verify_weight_wrapped.assert_called_once() @@ -153,7 +153,7 @@ def test_block_verify_without_storage(self) -> None: patch.object(BlockVerifier, 'verify_data', verify_data_wrapped), patch.object(VertexVerifier, 'verify_sigops_output', verify_sigops_output_wrapped), ): - self.manager.verification_service.verify_without_storage(block) + self.manager.verification_service.verify_without_storage(block, self.verification_params) # Vertex methods verify_outputs_wrapped.assert_called_once() @@ -170,6 +170,7 @@ def test_block_verify(self) -> None: block = self._get_valid_block() verify_outputs_wrapped = Mock(wraps=self.verifiers.vertex.verify_outputs) + verify_headers_wrapped = Mock(wraps=self.verifiers.vertex.verify_headers) verify_pow_wrapped = 
Mock(wraps=self.verifiers.vertex.verify_pow) verify_no_inputs_wrapped = Mock(wraps=self.verifiers.block.verify_no_inputs) @@ -183,6 +184,7 @@ def test_block_verify(self) -> None: with ( patch.object(VertexVerifier, 'verify_outputs', verify_outputs_wrapped), + patch.object(VertexVerifier, 'verify_headers', verify_headers_wrapped), patch.object(VertexVerifier, 'verify_pow', verify_pow_wrapped), patch.object(BlockVerifier, 'verify_no_inputs', verify_no_inputs_wrapped), patch.object(BlockVerifier, 'verify_output_token_indexes', verify_output_token_indexes_wrapped), @@ -193,10 +195,11 @@ def test_block_verify(self) -> None: patch.object(BlockVerifier, 'verify_height', verify_height_wrapped), patch.object(BlockVerifier, 'verify_mandatory_signaling', verify_mandatory_signaling_wrapped), ): - self.manager.verification_service.verify(block) + self.manager.verification_service.verify(block, self.verification_params) # Vertex methods verify_outputs_wrapped.assert_called_once() + verify_headers_wrapped.assert_called_once() # Block methods verify_pow_wrapped.assert_called_once() @@ -212,20 +215,20 @@ def test_block_verify(self) -> None: def test_block_validate_basic(self) -> None: block = self._get_valid_block() - verify_version_wrapped = Mock(wraps=self.verifiers.vertex.verify_version) + verify_version_basic_wrapped = Mock(wraps=self.verifiers.vertex.verify_version_basic) verify_weight_wrapped = Mock(wraps=self.verifiers.block.verify_weight) verify_reward_wrapped = Mock(wraps=self.verifiers.block.verify_reward) with ( - patch.object(VertexVerifier, 'verify_version', verify_version_wrapped), + patch.object(VertexVerifier, 'verify_version_basic', verify_version_basic_wrapped), patch.object(BlockVerifier, 'verify_weight', verify_weight_wrapped), patch.object(BlockVerifier, 'verify_reward', verify_reward_wrapped), ): - self.manager.verification_service.validate_basic(block) + self.manager.verification_service.validate_basic(block, self.verification_params) # Vertex methods - 
verify_version_wrapped.assert_called_once() + verify_version_basic_wrapped.assert_called_once() # Block methods verify_weight_wrapped.assert_called_once() @@ -235,7 +238,7 @@ def test_block_validate_basic(self) -> None: self.assertEqual(block.get_metadata().validation, ValidationState.BASIC) # full validation should still pass and the validation updated to FULL - self.manager.verification_service.validate_full(block) + self.manager.verification_service.validate_full(block, self.verification_params) self.assertEqual(block.get_metadata().validation, ValidationState.FULL) # and if running basic validation again it shouldn't validate or change the validation state @@ -246,7 +249,7 @@ def test_block_validate_basic(self) -> None: patch.object(BlockVerifier, 'verify_weight', verify_weight_wrapped2), patch.object(BlockVerifier, 'verify_reward', verify_reward_wrapped2), ): - self.manager.verification_service.validate_basic(block) + self.manager.verification_service.validate_basic(block, self.verification_params) # Block methods verify_weight_wrapped2.assert_not_called() @@ -258,7 +261,8 @@ def test_block_validate_basic(self) -> None: def test_block_validate_full(self) -> None: block = self._get_valid_block() - verify_version_wrapped = Mock(wraps=self.verifiers.vertex.verify_version) + verify_version_basic_wrapped = Mock(wraps=self.verifiers.vertex.verify_version_basic) + verify_headers_wrapped = Mock(wraps=self.verifiers.vertex.verify_headers) verify_outputs_wrapped = Mock(wraps=self.verifiers.vertex.verify_outputs) verify_pow_wrapped = Mock(wraps=self.verifiers.vertex.verify_pow) @@ -274,7 +278,8 @@ def test_block_validate_full(self) -> None: verify_mandatory_signaling_wrapped = Mock(wraps=self.verifiers.block.verify_mandatory_signaling) with ( - patch.object(VertexVerifier, 'verify_version', verify_version_wrapped), + patch.object(VertexVerifier, 'verify_version_basic', verify_version_basic_wrapped), + patch.object(VertexVerifier, 'verify_headers', 
verify_headers_wrapped), patch.object(VertexVerifier, 'verify_outputs', verify_outputs_wrapped), patch.object(VertexVerifier, 'verify_pow', verify_pow_wrapped), patch.object(BlockVerifier, 'verify_no_inputs', verify_no_inputs_wrapped), @@ -288,10 +293,11 @@ def test_block_validate_full(self) -> None: patch.object(BlockVerifier, 'verify_reward', verify_reward_wrapped), patch.object(BlockVerifier, 'verify_mandatory_signaling', verify_mandatory_signaling_wrapped), ): - self.manager.verification_service.validate_full(block) + self.manager.verification_service.validate_full(block, self.verification_params) # Vertex methods - verify_version_wrapped.assert_called_once() + verify_version_basic_wrapped.assert_called_once() + verify_headers_wrapped.assert_called_once() verify_outputs_wrapped.assert_called_once() # Block methods @@ -310,20 +316,20 @@ def test_block_validate_full(self) -> None: def test_merge_mined_block_verify_basic(self) -> None: block = self._get_valid_merge_mined_block() - verify_version_wrapped = Mock(wraps=self.verifiers.vertex.verify_version) + verify_version_basic_wrapped = Mock(wraps=self.verifiers.vertex.verify_version_basic) verify_weight_wrapped = Mock(wraps=self.verifiers.block.verify_weight) verify_reward_wrapped = Mock(wraps=self.verifiers.block.verify_reward) with ( - patch.object(VertexVerifier, 'verify_version', verify_version_wrapped), + patch.object(VertexVerifier, 'verify_version_basic', verify_version_basic_wrapped), patch.object(BlockVerifier, 'verify_weight', verify_weight_wrapped), patch.object(BlockVerifier, 'verify_reward', verify_reward_wrapped), ): - self.manager.verification_service.verify_basic(block) + self.manager.verification_service.verify_basic(block, self.verification_params) # Vertex methods - verify_version_wrapped.assert_called_once() + verify_version_basic_wrapped.assert_called_once() # Block methods verify_weight_wrapped.assert_called_once() @@ -350,7 +356,7 @@ def test_merge_mined_block_verify_without_storage(self) -> 
None: patch.object(BlockVerifier, 'verify_data', verify_data_wrapped), patch.object(VertexVerifier, 'verify_sigops_output', verify_sigops_output_wrapped), ): - self.manager.verification_service.verify_without_storage(block) + self.manager.verification_service.verify_without_storage(block, self.verification_params) # Vertex methods verify_outputs_wrapped.assert_called_once() @@ -367,6 +373,7 @@ def test_merge_mined_block_verify(self) -> None: block = self._get_valid_merge_mined_block() verify_outputs_wrapped = Mock(wraps=self.verifiers.vertex.verify_outputs) + verify_headers_wrapped = Mock(wraps=self.verifiers.vertex.verify_headers) verify_pow_wrapped = Mock(wraps=self.verifiers.vertex.verify_pow) verify_no_inputs_wrapped = Mock(wraps=self.verifiers.block.verify_no_inputs) @@ -382,6 +389,7 @@ def test_merge_mined_block_verify(self) -> None: with ( patch.object(VertexVerifier, 'verify_outputs', verify_outputs_wrapped), + patch.object(VertexVerifier, 'verify_headers', verify_headers_wrapped), patch.object(VertexVerifier, 'verify_pow', verify_pow_wrapped), patch.object(BlockVerifier, 'verify_no_inputs', verify_no_inputs_wrapped), patch.object(BlockVerifier, 'verify_output_token_indexes', verify_output_token_indexes_wrapped), @@ -393,10 +401,11 @@ def test_merge_mined_block_verify(self) -> None: patch.object(BlockVerifier, 'verify_mandatory_signaling', verify_mandatory_signaling_wrapped), patch.object(MergeMinedBlockVerifier, 'verify_aux_pow', verify_aux_pow_wrapped), ): - self.manager.verification_service.verify(block) + self.manager.verification_service.verify(block, self.verification_params) # Vertex methods verify_outputs_wrapped.assert_called_once() + verify_headers_wrapped.assert_called_once() # Block methods verify_pow_wrapped.assert_called_once() @@ -415,20 +424,20 @@ def test_merge_mined_block_verify(self) -> None: def test_merge_mined_block_validate_basic(self) -> None: block = self._get_valid_merge_mined_block() - verify_version_wrapped = 
Mock(wraps=self.verifiers.vertex.verify_version) + verify_version_basic_wrapped = Mock(wraps=self.verifiers.vertex.verify_version_basic) verify_weight_wrapped = Mock(wraps=self.verifiers.block.verify_weight) verify_reward_wrapped = Mock(wraps=self.verifiers.block.verify_reward) with ( - patch.object(VertexVerifier, 'verify_version', verify_version_wrapped), + patch.object(VertexVerifier, 'verify_version_basic', verify_version_basic_wrapped), patch.object(BlockVerifier, 'verify_weight', verify_weight_wrapped), patch.object(BlockVerifier, 'verify_reward', verify_reward_wrapped), ): - self.manager.verification_service.validate_basic(block) + self.manager.verification_service.validate_basic(block, self.verification_params) # Vertex methods - verify_version_wrapped.assert_called_once() + verify_version_basic_wrapped.assert_called_once() # Block methods verify_weight_wrapped.assert_called_once() @@ -438,7 +447,7 @@ def test_merge_mined_block_validate_basic(self) -> None: self.assertEqual(block.get_metadata().validation, ValidationState.BASIC) # full validation should still pass and the validation updated to FULL - self.manager.verification_service.validate_full(block) + self.manager.verification_service.validate_full(block, self.verification_params) self.assertEqual(block.get_metadata().validation, ValidationState.FULL) # and if running basic validation again it shouldn't validate or change the validation state @@ -449,7 +458,7 @@ def test_merge_mined_block_validate_basic(self) -> None: patch.object(BlockVerifier, 'verify_weight', verify_weight_wrapped2), patch.object(BlockVerifier, 'verify_reward', verify_reward_wrapped2), ): - self.manager.verification_service.validate_basic(block) + self.manager.verification_service.validate_basic(block, self.verification_params) # Block methods verify_weight_wrapped2.assert_not_called() @@ -461,7 +470,8 @@ def test_merge_mined_block_validate_basic(self) -> None: def test_merge_mined_block_validate_full(self) -> None: block = 
self._get_valid_merge_mined_block() - verify_version_wrapped = Mock(wraps=self.verifiers.vertex.verify_version) + verify_version_basic_wrapped = Mock(wraps=self.verifiers.vertex.verify_version_basic) + verify_headers_wrapped = Mock(wraps=self.verifiers.vertex.verify_headers) verify_outputs_wrapped = Mock(wraps=self.verifiers.vertex.verify_outputs) verify_pow_wrapped = Mock(wraps=self.verifiers.vertex.verify_pow) @@ -479,7 +489,8 @@ def test_merge_mined_block_validate_full(self) -> None: verify_aux_pow_wrapped = Mock(wraps=self.verifiers.merge_mined_block.verify_aux_pow) with ( - patch.object(VertexVerifier, 'verify_version', verify_version_wrapped), + patch.object(VertexVerifier, 'verify_version_basic', verify_version_basic_wrapped), + patch.object(VertexVerifier, 'verify_headers', verify_headers_wrapped), patch.object(VertexVerifier, 'verify_outputs', verify_outputs_wrapped), patch.object(VertexVerifier, 'verify_pow', verify_pow_wrapped), patch.object(BlockVerifier, 'verify_no_inputs', verify_no_inputs_wrapped), @@ -494,10 +505,11 @@ def test_merge_mined_block_validate_full(self) -> None: patch.object(BlockVerifier, 'verify_mandatory_signaling', verify_mandatory_signaling_wrapped), patch.object(MergeMinedBlockVerifier, 'verify_aux_pow', verify_aux_pow_wrapped), ): - self.manager.verification_service.validate_full(block) + self.manager.verification_service.validate_full(block, self.verification_params) # Vertex methods - verify_version_wrapped.assert_called_once() + verify_version_basic_wrapped.assert_called_once() + verify_headers_wrapped.assert_called_once() verify_outputs_wrapped.assert_called_once() # Block methods @@ -519,7 +531,7 @@ def test_merge_mined_block_validate_full(self) -> None: def test_transaction_verify_basic(self) -> None: tx = self._get_valid_tx() - verify_version_wrapped = Mock(wraps=self.verifiers.vertex.verify_version) + verify_version_basic_wrapped = Mock(wraps=self.verifiers.vertex.verify_version_basic) verify_outputs_wrapped = 
Mock(wraps=self.verifiers.vertex.verify_outputs) verify_parents_basic_wrapped = Mock(wraps=self.verifiers.tx.verify_parents_basic) @@ -531,7 +543,7 @@ def test_transaction_verify_basic(self) -> None: verify_sigops_output_wrapped = Mock(wraps=self.verifiers.vertex.verify_sigops_output) with ( - patch.object(VertexVerifier, 'verify_version', verify_version_wrapped), + patch.object(VertexVerifier, 'verify_version_basic', verify_version_basic_wrapped), patch.object(VertexVerifier, 'verify_outputs', verify_outputs_wrapped), patch.object(TransactionVerifier, 'verify_parents_basic', verify_parents_basic_wrapped), patch.object(TransactionVerifier, 'verify_weight', verify_weight_wrapped), @@ -541,10 +553,10 @@ def test_transaction_verify_basic(self) -> None: patch.object(VertexVerifier, 'verify_number_of_outputs', verify_number_of_outputs_wrapped), patch.object(VertexVerifier, 'verify_sigops_output', verify_sigops_output_wrapped), ): - self.manager.verification_service.verify_basic(tx) + self.manager.verification_service.verify_basic(tx, self.verification_params) # Vertex methods - verify_version_wrapped.assert_called_once() + verify_version_basic_wrapped.assert_called_once() verify_outputs_wrapped.assert_called_once() # Transaction methods @@ -575,7 +587,7 @@ def test_transaction_verify_without_storage(self) -> None: patch.object(VertexVerifier, 'verify_number_of_outputs', verify_number_of_outputs_wrapped), patch.object(VertexVerifier, 'verify_sigops_output', verify_sigops_output_wrapped), ): - self.manager.verification_service.verify_without_storage(tx) + self.manager.verification_service.verify_without_storage(tx, self.verification_params) # Vertex methods verify_outputs_wrapped.assert_called_once() @@ -592,6 +604,7 @@ def test_transaction_verify(self) -> None: tx = self._get_valid_tx() verify_outputs_wrapped = Mock(wraps=self.verifiers.vertex.verify_outputs) + verify_headers_wrapped = Mock(wraps=self.verifiers.vertex.verify_headers) verify_pow_wrapped = 
Mock(wraps=self.verifiers.vertex.verify_pow) verify_number_of_inputs_wrapped = Mock(wraps=self.verifiers.tx.verify_number_of_inputs) @@ -604,9 +617,11 @@ def test_transaction_verify(self) -> None: verify_parents_wrapped = Mock(wraps=self.verifiers.vertex.verify_parents) verify_sum_wrapped = Mock(wraps=self.verifiers.tx.verify_sum) verify_reward_locked_wrapped = Mock(wraps=self.verifiers.tx.verify_reward_locked) + verify_tx_version_wrapped = Mock(wraps=self.verifiers.tx.verify_version) with ( patch.object(VertexVerifier, 'verify_outputs', verify_outputs_wrapped), + patch.object(VertexVerifier, 'verify_headers', verify_headers_wrapped), patch.object(VertexVerifier, 'verify_pow', verify_pow_wrapped), patch.object(TransactionVerifier, 'verify_number_of_inputs', verify_number_of_inputs_wrapped), patch.object(TransactionVerifier, 'verify_output_token_indexes', verify_output_token_indexes_wrapped), @@ -618,11 +633,13 @@ def test_transaction_verify(self) -> None: patch.object(VertexVerifier, 'verify_parents', verify_parents_wrapped), patch.object(TransactionVerifier, 'verify_sum', verify_sum_wrapped), patch.object(TransactionVerifier, 'verify_reward_locked', verify_reward_locked_wrapped), + patch.object(TransactionVerifier, 'verify_version', verify_tx_version_wrapped), ): - self.manager.verification_service.verify(tx) + self.manager.verification_service.verify(tx, self.verification_params) # Vertex methods verify_outputs_wrapped.assert_called_once() + verify_headers_wrapped.assert_called_once() # Transaction methods verify_pow_wrapped.assert_called_once() @@ -636,13 +653,14 @@ def test_transaction_verify(self) -> None: verify_parents_wrapped.assert_called_once() verify_sum_wrapped.assert_called_once() verify_reward_locked_wrapped.assert_called_once() + verify_tx_version_wrapped.assert_called_once() def test_transaction_validate_basic(self) -> None: # add enough blocks so that it can be fully validated later on the tests add_blocks_unlock_reward(self.manager) tx = 
self._get_valid_tx() - verify_version_wrapped = Mock(wraps=self.verifiers.vertex.verify_version) + verify_version_basic_wrapped = Mock(wraps=self.verifiers.vertex.verify_version_basic) verify_outputs_wrapped = Mock(wraps=self.verifiers.vertex.verify_outputs) verify_parents_basic_wrapped = Mock(wraps=self.verifiers.tx.verify_parents_basic) @@ -654,7 +672,7 @@ def test_transaction_validate_basic(self) -> None: verify_sigops_output_wrapped = Mock(wraps=self.verifiers.vertex.verify_sigops_output) with ( - patch.object(VertexVerifier, 'verify_version', verify_version_wrapped), + patch.object(VertexVerifier, 'verify_version_basic', verify_version_basic_wrapped), patch.object(VertexVerifier, 'verify_outputs', verify_outputs_wrapped), patch.object(TransactionVerifier, 'verify_parents_basic', verify_parents_basic_wrapped), patch.object(TransactionVerifier, 'verify_weight', verify_weight_wrapped), @@ -664,10 +682,10 @@ def test_transaction_validate_basic(self) -> None: patch.object(VertexVerifier, 'verify_number_of_outputs', verify_number_of_outputs_wrapped), patch.object(VertexVerifier, 'verify_sigops_output', verify_sigops_output_wrapped), ): - self.manager.verification_service.validate_basic(tx) + self.manager.verification_service.validate_basic(tx, self.verification_params) # Vertex methods - verify_version_wrapped.assert_called_once() + verify_version_basic_wrapped.assert_called_once() verify_outputs_wrapped.assert_called_once() # Transaction methods @@ -683,7 +701,7 @@ def test_transaction_validate_basic(self) -> None: self.assertEqual(tx.get_metadata().validation, ValidationState.BASIC) # full validation should still pass and the validation updated to FULL - self.manager.verification_service.validate_full(tx) + self.manager.verification_service.validate_full(tx, self.verification_params) self.assertEqual(tx.get_metadata().validation, ValidationState.FULL) # and if running basic validation again it shouldn't validate or change the validation state @@ -704,7 +722,7 @@ 
def test_transaction_validate_basic(self) -> None: patch.object(VertexVerifier, 'verify_number_of_outputs', verify_number_of_outputs_wrapped2), patch.object(VertexVerifier, 'verify_sigops_output', verify_sigops_output_wrapped2), ): - self.manager.verification_service.validate_basic(tx) + self.manager.verification_service.validate_basic(tx, self.verification_params) # Transaction methods verify_parents_basic_wrapped2.assert_not_called() @@ -722,7 +740,8 @@ def test_transaction_validate_full(self) -> None: add_blocks_unlock_reward(self.manager) tx = self._get_valid_tx() - verify_version_wrapped = Mock(wraps=self.verifiers.vertex.verify_version) + verify_version_basic_wrapped = Mock(wraps=self.verifiers.vertex.verify_version_basic) + verify_headers_wrapped = Mock(wraps=self.verifiers.vertex.verify_headers) verify_outputs_wrapped = Mock(wraps=self.verifiers.vertex.verify_outputs) verify_parents_basic_wrapped = Mock(wraps=self.verifiers.tx.verify_parents_basic) @@ -738,9 +757,11 @@ def test_transaction_validate_full(self) -> None: verify_parents_wrapped = Mock(wraps=self.verifiers.vertex.verify_parents) verify_sum_wrapped = Mock(wraps=self.verifiers.tx.verify_sum) verify_reward_locked_wrapped = Mock(wraps=self.verifiers.tx.verify_reward_locked) + verify_tx_version_wrapped = Mock(wraps=self.verifiers.tx.verify_version) with ( - patch.object(VertexVerifier, 'verify_version', verify_version_wrapped), + patch.object(VertexVerifier, 'verify_version_basic', verify_version_basic_wrapped), + patch.object(VertexVerifier, 'verify_headers', verify_headers_wrapped), patch.object(VertexVerifier, 'verify_outputs', verify_outputs_wrapped), patch.object(TransactionVerifier, 'verify_parents_basic', verify_parents_basic_wrapped), patch.object(TransactionVerifier, 'verify_weight', verify_weight_wrapped), @@ -755,11 +776,13 @@ def test_transaction_validate_full(self) -> None: patch.object(VertexVerifier, 'verify_parents', verify_parents_wrapped), patch.object(TransactionVerifier, 
'verify_sum', verify_sum_wrapped), patch.object(TransactionVerifier, 'verify_reward_locked', verify_reward_locked_wrapped), + patch.object(TransactionVerifier, 'verify_version', verify_tx_version_wrapped), ): - self.manager.verification_service.validate_full(tx) + self.manager.verification_service.validate_full(tx, self.verification_params) # Vertex methods - verify_version_wrapped.assert_called_once() + verify_version_basic_wrapped.assert_called_once() + verify_headers_wrapped.assert_called_once() assert verify_outputs_wrapped.call_count == 2 # Transaction methods @@ -776,6 +799,7 @@ def test_transaction_validate_full(self) -> None: verify_parents_wrapped.assert_called_once() verify_sum_wrapped.assert_called_once() verify_reward_locked_wrapped.assert_called_once() + verify_headers_wrapped.assert_called_once() # validation should be FULL self.assertEqual(tx.get_metadata().validation, ValidationState.FULL) @@ -798,7 +822,7 @@ def test_transaction_validate_full(self) -> None: patch.object(VertexVerifier, 'verify_number_of_outputs', verify_number_of_outputs_wrapped2), patch.object(VertexVerifier, 'verify_sigops_output', verify_sigops_output_wrapped2), ): - self.manager.verification_service.validate_basic(tx) + self.manager.verification_service.validate_basic(tx, self.verification_params) # Transaction methods verify_parents_basic_wrapped2.assert_not_called() @@ -815,7 +839,7 @@ def test_transaction_validate_full(self) -> None: def test_token_creation_transaction_verify_basic(self) -> None: tx = self._get_valid_token_creation_tx() - verify_version_wrapped = Mock(wraps=self.verifiers.vertex.verify_version) + verify_version_basic_wrapped = Mock(wraps=self.verifiers.vertex.verify_version_basic) verify_outputs_wrapped = Mock(wraps=self.verifiers.vertex.verify_outputs) verify_parents_basic_wrapped = Mock(wraps=self.verifiers.tx.verify_parents_basic) @@ -827,7 +851,7 @@ def test_token_creation_transaction_verify_basic(self) -> None: verify_sigops_output_wrapped = 
Mock(wraps=self.verifiers.vertex.verify_sigops_output) with ( - patch.object(VertexVerifier, 'verify_version', verify_version_wrapped), + patch.object(VertexVerifier, 'verify_version_basic', verify_version_basic_wrapped), patch.object(VertexVerifier, 'verify_outputs', verify_outputs_wrapped), patch.object(TransactionVerifier, 'verify_parents_basic', verify_parents_basic_wrapped), patch.object(TransactionVerifier, 'verify_weight', verify_weight_wrapped), @@ -837,10 +861,10 @@ def test_token_creation_transaction_verify_basic(self) -> None: patch.object(VertexVerifier, 'verify_number_of_outputs', verify_number_of_outputs_wrapped), patch.object(VertexVerifier, 'verify_sigops_output', verify_sigops_output_wrapped), ): - self.manager.verification_service.verify_basic(tx) + self.manager.verification_service.verify_basic(tx, self.verification_params) # Vertex methods - verify_version_wrapped.assert_called_once() + verify_version_basic_wrapped.assert_called_once() verify_outputs_wrapped.assert_called_once() # Transaction methods @@ -871,7 +895,7 @@ def test_token_creation_transaction_verify_without_storage(self) -> None: patch.object(VertexVerifier, 'verify_number_of_outputs', verify_number_of_outputs_wrapped), patch.object(VertexVerifier, 'verify_sigops_output', verify_sigops_output_wrapped), ): - self.manager.verification_service.verify_without_storage(tx) + self.manager.verification_service.verify_without_storage(tx, self.verification_params) # Vertex methods verify_outputs_wrapped.assert_called_once() @@ -887,6 +911,7 @@ def test_token_creation_transaction_verify(self) -> None: tx = self._get_valid_token_creation_tx() verify_outputs_wrapped = Mock(wraps=self.verifiers.vertex.verify_outputs) + verify_headers_wrapped = Mock(wraps=self.verifiers.vertex.verify_headers) verify_pow_wrapped = Mock(wraps=self.verifiers.vertex.verify_pow) verify_number_of_inputs_wrapped = Mock(wraps=self.verifiers.tx.verify_number_of_inputs) @@ -899,12 +924,14 @@ def 
test_token_creation_transaction_verify(self) -> None: verify_parents_wrapped = Mock(wraps=self.verifiers.vertex.verify_parents) verify_sum_wrapped = Mock(wraps=self.verifiers.tx.verify_sum) verify_reward_locked_wrapped = Mock(wraps=self.verifiers.tx.verify_reward_locked) + verify_tx_version_wrapped = Mock(wraps=self.verifiers.tx.verify_version) verify_token_info_wrapped = Mock(wraps=self.verifiers.token_creation_tx.verify_token_info) verify_minted_tokens_wrapped = Mock(wraps=self.verifiers.token_creation_tx.verify_minted_tokens) with ( patch.object(VertexVerifier, 'verify_outputs', verify_outputs_wrapped), + patch.object(VertexVerifier, 'verify_headers', verify_headers_wrapped), patch.object(VertexVerifier, 'verify_pow', verify_pow_wrapped), patch.object(TransactionVerifier, 'verify_number_of_inputs', verify_number_of_inputs_wrapped), patch.object(TransactionVerifier, 'verify_output_token_indexes', verify_output_token_indexes_wrapped), @@ -916,13 +943,15 @@ def test_token_creation_transaction_verify(self) -> None: patch.object(VertexVerifier, 'verify_parents', verify_parents_wrapped), patch.object(TransactionVerifier, 'verify_sum', verify_sum_wrapped), patch.object(TransactionVerifier, 'verify_reward_locked', verify_reward_locked_wrapped), + patch.object(TransactionVerifier, 'verify_version', verify_tx_version_wrapped), patch.object(TokenCreationTransactionVerifier, 'verify_token_info', verify_token_info_wrapped), patch.object(TokenCreationTransactionVerifier, 'verify_minted_tokens', verify_minted_tokens_wrapped), ): - self.manager.verification_service.verify(tx) + self.manager.verification_service.verify(tx, self.verification_params) # Vertex methods verify_outputs_wrapped.assert_called_once() + verify_headers_wrapped.assert_called_once() # Transaction methods verify_pow_wrapped.assert_called_once() @@ -936,6 +965,7 @@ def test_token_creation_transaction_verify(self) -> None: verify_parents_wrapped.assert_called_once() verify_sum_wrapped.assert_called_once() 
verify_reward_locked_wrapped.assert_called_once() + verify_tx_version_wrapped.assert_called_once() # TokenCreationTransaction methods verify_token_info_wrapped.assert_called_once() @@ -945,7 +975,7 @@ def test_token_creation_transaction_validate_basic(self) -> None: tx = self._get_valid_token_creation_tx() tx.get_metadata().validation = ValidationState.INITIAL - verify_version_wrapped = Mock(wraps=self.verifiers.vertex.verify_version) + verify_version_basic_wrapped = Mock(wraps=self.verifiers.vertex.verify_version_basic) verify_outputs_wrapped = Mock(wraps=self.verifiers.vertex.verify_outputs) verify_parents_basic_wrapped = Mock(wraps=self.verifiers.tx.verify_parents_basic) @@ -957,7 +987,7 @@ def test_token_creation_transaction_validate_basic(self) -> None: verify_sigops_output_wrapped = Mock(wraps=self.verifiers.vertex.verify_sigops_output) with ( - patch.object(VertexVerifier, 'verify_version', verify_version_wrapped), + patch.object(VertexVerifier, 'verify_version_basic', verify_version_basic_wrapped), patch.object(VertexVerifier, 'verify_outputs', verify_outputs_wrapped), patch.object(TransactionVerifier, 'verify_parents_basic', verify_parents_basic_wrapped), patch.object(TransactionVerifier, 'verify_weight', verify_weight_wrapped), @@ -967,10 +997,10 @@ def test_token_creation_transaction_validate_basic(self) -> None: patch.object(VertexVerifier, 'verify_number_of_outputs', verify_number_of_outputs_wrapped), patch.object(VertexVerifier, 'verify_sigops_output', verify_sigops_output_wrapped), ): - self.manager.verification_service.validate_basic(tx) + self.manager.verification_service.validate_basic(tx, self.verification_params) # Vertex methods - verify_version_wrapped.assert_called_once() + verify_version_basic_wrapped.assert_called_once() verify_outputs_wrapped.assert_called_once() # Transaction methods @@ -986,7 +1016,7 @@ def test_token_creation_transaction_validate_basic(self) -> None: self.assertEqual(tx.get_metadata().validation, ValidationState.BASIC) 
# full validation should still pass and the validation updated to FULL - self.manager.verification_service.validate_full(tx) + self.manager.verification_service.validate_full(tx, self.verification_params) self.assertEqual(tx.get_metadata().validation, ValidationState.FULL) # and if running basic validation again it shouldn't validate or change the validation state @@ -1007,7 +1037,7 @@ def test_token_creation_transaction_validate_basic(self) -> None: patch.object(VertexVerifier, 'verify_number_of_outputs', verify_number_of_outputs_wrapped2), patch.object(VertexVerifier, 'verify_sigops_output', verify_sigops_output_wrapped2), ): - self.manager.verification_service.validate_basic(tx) + self.manager.verification_service.validate_basic(tx, self.verification_params) # Transaction methods verify_parents_basic_wrapped2.assert_not_called() @@ -1025,7 +1055,8 @@ def test_token_creation_transaction_validate_full(self) -> None: tx = self._get_valid_token_creation_tx() tx.get_metadata().validation = ValidationState.INITIAL - verify_version_wrapped = Mock(wraps=self.verifiers.vertex.verify_version) + verify_version_basic_wrapped = Mock(wraps=self.verifiers.vertex.verify_version_basic) + verify_headers_wrapped = Mock(wraps=self.verifiers.vertex.verify_headers) verify_outputs_wrapped = Mock(wraps=self.verifiers.vertex.verify_outputs) verify_parents_basic_wrapped = Mock(wraps=self.verifiers.tx.verify_parents_basic) @@ -1041,12 +1072,14 @@ def test_token_creation_transaction_validate_full(self) -> None: verify_parents_wrapped = Mock(wraps=self.verifiers.vertex.verify_parents) verify_sum_wrapped = Mock(wraps=self.verifiers.tx.verify_sum) verify_reward_locked_wrapped = Mock(wraps=self.verifiers.tx.verify_reward_locked) + verify_tx_version_wrapped = Mock(wraps=self.verifiers.tx.verify_version) verify_token_info_wrapped = Mock(wraps=self.verifiers.token_creation_tx.verify_token_info) verify_minted_tokens_wrapped = Mock(wraps=self.verifiers.token_creation_tx.verify_minted_tokens) with ( 
- patch.object(VertexVerifier, 'verify_version', verify_version_wrapped), + patch.object(VertexVerifier, 'verify_version_basic', verify_version_basic_wrapped), + patch.object(VertexVerifier, 'verify_headers', verify_headers_wrapped), patch.object(VertexVerifier, 'verify_outputs', verify_outputs_wrapped), patch.object(TransactionVerifier, 'verify_parents_basic', verify_parents_basic_wrapped), patch.object(TransactionVerifier, 'verify_weight', verify_weight_wrapped), @@ -1061,13 +1094,15 @@ def test_token_creation_transaction_validate_full(self) -> None: patch.object(VertexVerifier, 'verify_parents', verify_parents_wrapped), patch.object(TransactionVerifier, 'verify_sum', verify_sum_wrapped), patch.object(TransactionVerifier, 'verify_reward_locked', verify_reward_locked_wrapped), + patch.object(TransactionVerifier, 'verify_version', verify_tx_version_wrapped), patch.object(TokenCreationTransactionVerifier, 'verify_token_info', verify_token_info_wrapped), patch.object(TokenCreationTransactionVerifier, 'verify_minted_tokens', verify_minted_tokens_wrapped), ): - self.manager.verification_service.validate_full(tx) + self.manager.verification_service.validate_full(tx, self.verification_params) # Vertex methods - verify_version_wrapped.assert_called_once() + verify_version_basic_wrapped.assert_called_once() + verify_headers_wrapped.assert_called_once() assert verify_outputs_wrapped.call_count == 2 # Transaction methods @@ -1084,6 +1119,7 @@ def test_token_creation_transaction_validate_full(self) -> None: verify_parents_wrapped.assert_called_once() verify_sum_wrapped.assert_called_once() verify_reward_locked_wrapped.assert_called_once() + verify_tx_version_wrapped.assert_called_once() # TokenCreationTransaction methods verify_token_info_wrapped.assert_called_once() diff --git a/tests/unittest.py b/tests/unittest.py index 0992b636b..6c67dccd4 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -1,8 +1,11 @@ +import base64 import os +import re import secrets import 
shutil import tempfile import time +from contextlib import contextmanager from typing import Any, Callable, Collection, Iterable, Iterator, Optional from unittest import main as ut_main @@ -14,26 +17,27 @@ from hathor.conf.get_settings import get_global_settings from hathor.conf.settings import HathorSettings from hathor.daa import DifficultyAdjustmentAlgorithm, TestMode -from hathor.dag_builder import DAGBuilder from hathor.event import EventManager from hathor.event.storage import EventStorage from hathor.manager import HathorManager +from hathor.nanocontracts.nc_exec_logs import NCLogConfig from hathor.p2p.peer import PrivatePeer from hathor.p2p.sync_v2.agent import NodeBlockSync from hathor.pubsub import PubSubManager from hathor.reactor import ReactorProtocol as Reactor, get_global_reactor from hathor.simulator.clock import MemoryReactorHeapClock +from hathor.storage import RocksDBStorage from hathor.transaction import BaseTransaction, Block, Transaction from hathor.transaction.storage.transaction_storage import TransactionStorage from hathor.types import VertexId -from hathor.util import Random, not_none -from hathor.wallet import BaseWallet, HDWallet, Wallet +from hathor.util import Random, initialize_hd_wallet, not_none +from hathor.verification.verification_params import VerificationParams +from hathor.wallet import BaseWallet, Wallet from tests.test_memory_reactor_clock import TestMemoryReactorClock -from tests.utils import GENESIS_SEED +from tests.utils import DEFAULT_WORDS logger = get_logger() main = ut_main -USE_MEMORY_STORAGE = os.environ.get('HATHOR_TEST_MEMORY_STORAGE', 'false').lower() == 'true' def short_hashes(container: Collection[bytes]) -> Iterable[str]: @@ -62,6 +66,16 @@ def _get_default_peer_id_pool_filepath() -> str: PEER_ID_POOL = list(_load_peer_pool()) +OCB_TEST_PRIVKEY: bytes = base64.b64decode( + 'MIH0MF8GCSqGSIb3DQEFDTBSMDEGCSqGSIb3DQEFDDAkBBCIdovnmKjK3KU' + 'c61YGgja0AgIIADAMBggqhkiG9w0CCQUAMB0GCWCGSAFlAwQBKgQQl2CJT4' + 
'I2IUzRNoU9hyOWEwSBkLznN9Nunel+kK0FXpk//z0ZAnIyVacfHklCxFGyO' + 'j1VSjor0CHzH2Gmblvr+m7lCmRmqSVAwJpplqQYdBUF6sR9djHLY6svPY0o' + '//dqQ/xM7QiY2FHlb3JQCTu7DaMflqPcJXlRXAFyoACnmj4/lUJWgrcWala' + 'rCSI+8rIillg3AU8/2gfoB1BxulVIIG35SQ==' +) +OCB_TEST_PASSWORD: bytes = b'OCBtestPW' + class TestBuilder(Builder): __test__ = False @@ -91,7 +105,6 @@ def _get_reactor(self) -> Reactor: class TestCase(unittest.TestCase): - use_memory_storage: bool = USE_MEMORY_STORAGE seed_config: Optional[int] = None def setUp(self) -> None: @@ -106,6 +119,7 @@ def setUp(self) -> None: self.rng = Random(self.seed) self._pending_cleanups: list[Callable[..., Any]] = [] self._settings = get_global_settings() + self.verification_params = VerificationParams.default_for_mempool() def tearDown(self) -> None: self.clean_tmpdirs() @@ -151,20 +165,8 @@ def _create_test_wallet(self, unlocked: bool = False) -> Wallet: wallet.lock() return wallet - def get_dag_builder(self, manager: HathorManager) -> DAGBuilder: - genesis_wallet = HDWallet(words=GENESIS_SEED) - genesis_wallet._manually_initialize() - - return DAGBuilder( - settings=manager._settings, - daa=manager.daa, - genesis_wallet=genesis_wallet, - wallet_factory=self.get_wallet, - vertex_resolver=lambda x: manager.cpu_mining_service.resolve(x), - ) - - def get_builder(self) -> TestBuilder: - builder = TestBuilder() + def get_builder(self, settings: HathorSettings | None = None) -> TestBuilder: + builder = TestBuilder(settings) builder.set_rng(self.rng) \ .set_reactor(self.clock) return builder @@ -194,23 +196,21 @@ def create_peer( # type: ignore[no-untyped-def] unlock_wallet: bool = True, wallet_index: bool = False, capabilities: list[str] | None = None, - full_verification: bool = True, checkpoints: list[Checkpoint] | None = None, utxo_index: bool = False, event_manager: EventManager | None = None, - use_memory_index: bool | None = None, start_manager: bool = True, pubsub: PubSubManager | None = None, event_storage: EventStorage | None = None, 
enable_event_queue: bool | None = None, - use_memory_storage: bool | None = None, enable_ipv6: bool = False, disable_ipv4: bool = False, + nc_indexes: bool = False, + nc_log_config: NCLogConfig | None = None, ): # TODO: Add -> HathorManager here. It breaks the lint in a lot of places. settings = self._settings._replace(NETWORK_NAME=network) builder = self.get_builder() \ - .set_full_verification(full_verification) \ .set_settings(settings) if checkpoints is not None: @@ -239,25 +239,15 @@ def create_peer( # type: ignore[no-untyped-def] if enable_event_queue: builder.enable_event_queue() - if tx_storage is not None: - builder.set_tx_storage(tx_storage) - - if use_memory_storage or self.use_memory_storage: - builder.use_memory() - else: - directory = tempfile.mkdtemp() - self.tmpdirs.append(directory) - builder.use_rocksdb(directory) - - if use_memory_index is True: - builder.force_memory_index() - if wallet_index: builder.enable_wallet_index() if utxo_index: builder.enable_utxo_index() + if tx_storage is not None: + builder.set_tx_storage(tx_storage) + if capabilities is not None: builder.set_capabilities(capabilities) @@ -269,10 +259,25 @@ def create_peer( # type: ignore[no-untyped-def] daa = DifficultyAdjustmentAlgorithm(settings=self._settings, test_mode=TestMode.TEST_ALL_WEIGHT) builder.set_daa(daa) + + if nc_indexes: + builder.enable_nc_indexes() + + if nc_log_config: + builder.set_nc_log_config(nc_log_config) + manager = self.create_peer_from_builder(builder, start_manager=start_manager) return manager + def create_tx_storage(self, settings: HathorSettings | None = None) -> TransactionStorage: + artifacts = self.get_builder(settings).build() + return artifacts.tx_storage + + def create_rocksdb_storage(self, settings: HathorSettings | None = None) -> RocksDBStorage: + artifacts = self.get_builder(settings).build() + return not_none(artifacts.rocksdb_storage) + def run_to_completion(self) -> None: """ This will advance the test's clock until all calls scheduled 
are done. """ @@ -443,6 +448,24 @@ def assertSyncedProgress(self, node_sync: NodeBlockSync) -> None: def assertV2SyncedProgress(self, node_sync: NodeBlockSync) -> None: self.assertEqual(node_sync.synced_block, node_sync.peer_best_block) + @contextmanager + def assertNCFail(self, class_name: str, pattern: str | re.Pattern[str] | None = None) -> Iterator[BaseException]: + """Assert that a NCFail is raised and it has the expected class name and str(exc) format. + """ + from hathor.nanocontracts.exception import NCFail + + with self.assertRaises(NCFail) as cm: + yield cm + + self.assertEqual(cm.exception.__class__.__name__, class_name) + + if pattern is not None: + actual = str(cm.exception) + if isinstance(pattern, re.Pattern): + assert pattern.match(actual) + else: + self.assertEqual(pattern, actual) + def clean_tmpdirs(self) -> None: for tmpdir in self.tmpdirs: shutil.rmtree(tmpdir) @@ -488,18 +511,10 @@ def clean_pending(self, required_to_quiesce: bool = True) -> None: if required_to_quiesce and active: self.fail('Reactor was still active when it was required to be quiescent.') - def get_wallet(self) -> HDWallet: - words = ('bind daring above film health blush during tiny neck slight clown salmon ' - 'wine brown good setup later omit jaguar tourist rescue flip pet salute') - - hd = HDWallet(words=words) - hd._manually_initialize() - return hd - def get_address(self, index: int) -> Optional[str]: """ Generate a fixed HD Wallet and return an address """ - hd = self.get_wallet() + hd = initialize_hd_wallet(DEFAULT_WORDS) if index >= hd.gap_limit: return None diff --git a/tests/utils.py b/tests/utils.py index 5be5c8510..2a61184e5 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -5,7 +5,7 @@ import time import urllib.parse from dataclasses import dataclass -from typing import Any, Optional +from typing import Any, Optional, cast import requests from cryptography.hazmat.backends import default_backend @@ -26,12 +26,18 @@ from hathor.transaction.token_creation_tx 
import TokenCreationTransaction from hathor.transaction.util import get_deposit_amount from hathor.util import Random +from hathor.verification.verification_params import VerificationParams settings = HathorSettings() # useful for adding blocks to a different wallet BURN_ADDRESS = bytes.fromhex('28acbfb94571417423c1ed66f706730c4aea516ac5762cccb8') +DEFAULT_WORDS: str = ( + 'bind daring above film health blush during tiny neck slight clown salmon ' + 'wine brown good setup later omit jaguar tourist rescue flip pet salute' +) + def resolve_block_bytes(*, block_bytes: bytes, cpu_mining_service: CpuMiningService) -> bytes: """ From block bytes we create a block and resolve pow @@ -45,19 +51,62 @@ def resolve_block_bytes(*, block_bytes: bytes, cpu_mining_service: CpuMiningServ return block.get_struct() -def add_custom_tx(manager: HathorManager, tx_inputs: list[tuple[BaseTransaction, int]], *, n_outputs: int = 1, - base_parent: Optional[Transaction] = None, weight: Optional[float] = None, - resolve: bool = False, address: Optional[str] = None, inc_timestamp: int = 0) -> Transaction: +def add_custom_tx( + manager: HathorManager, + tx_inputs: list[tuple[BaseTransaction, int]], + *, + n_outputs: int = 1, + base_parent: Optional[Transaction] = None, + weight: Optional[float] = None, + resolve: bool = False, + address: Optional[str] = None, + inc_timestamp: int = 0 +) -> Transaction: """Add a custom tx based on the gen_custom_tx(...) 
method.""" - tx = gen_custom_tx(manager, tx_inputs, n_outputs=n_outputs, base_parent=base_parent, weight=weight, - resolve=resolve, address=address, inc_timestamp=inc_timestamp) - manager.propagate_tx(tx, fails_silently=False) + tx = gen_custom_tx(manager, + tx_inputs, + n_outputs=n_outputs, + base_parent=base_parent, + weight=weight, + resolve=resolve, + address=address, + inc_timestamp=inc_timestamp) + manager.propagate_tx(tx) return tx -def gen_custom_tx(manager: HathorManager, tx_inputs: list[tuple[BaseTransaction, int]], *, n_outputs: int = 1, - base_parent: Optional[Transaction] = None, weight: Optional[float] = None, - resolve: bool = False, address: Optional[str] = None, inc_timestamp: int = 0) -> Transaction: +def gen_custom_tx(manager: HathorManager, + tx_inputs: list[tuple[BaseTransaction, int]], + *, + n_outputs: int = 1, + base_parent: Optional[Transaction] = None, + weight: Optional[float] = None, + resolve: bool = False, + address: Optional[str] = None, + inc_timestamp: int = 0) -> Transaction: + """Generate a custom tx based on the inputs and outputs. It gives full control to the + inputs and can be used to generate conflicts and specific patterns in the DAG.""" + tx = gen_custom_base_tx(manager, + tx_inputs, + n_outputs=n_outputs, + base_parent=base_parent, + weight=weight, + resolve=resolve, + address=address, + inc_timestamp=inc_timestamp) + return cast(Transaction, tx) + + +def gen_custom_base_tx(manager: HathorManager, + tx_inputs: list[tuple[BaseTransaction, int]], + *, + n_outputs: int = 1, + base_parent: Optional[Transaction] = None, + weight: Optional[float] = None, + resolve: bool = False, + address: Optional[str] = None, + inc_timestamp: int = 0, + cls: type[BaseTransaction] = Transaction) -> BaseTransaction: """Generate a custom tx based on the inputs and outputs. 
It gives full control to the inputs and can be used to generate conflicts and specific patterns in the DAG.""" wallet = manager.wallet @@ -95,7 +144,7 @@ def gen_custom_tx(manager: HathorManager, tx_inputs: list[tuple[BaseTransaction, else: raise NotImplementedError - tx2 = wallet.prepare_transaction(Transaction, inputs, outputs) + tx2 = wallet.prepare_transaction(cls, inputs, outputs) tx2.storage = manager.tx_storage tx2.timestamp = max(tx_base.timestamp + 1, int(manager.reactor.seconds())) @@ -122,7 +171,7 @@ def gen_custom_tx(manager: HathorManager, tx_inputs: list[tuple[BaseTransaction, def add_new_double_spending(manager: HathorManager, *, use_same_parents: bool = False, tx: Optional[Transaction] = None, weight: float = 1) -> Transaction: tx = gen_new_double_spending(manager, use_same_parents=use_same_parents, tx=tx, weight=weight) - manager.propagate_tx(tx, fails_silently=False) + manager.propagate_tx(tx) return tx @@ -149,7 +198,7 @@ def add_new_tx( """ tx = gen_new_tx(manager, address, value) if propagate: - manager.propagate_tx(tx, fails_silently=False) + manager.propagate_tx(tx) if advance_clock: manager.reactor.advance(advance_clock) # type: ignore[attr-defined] return tx @@ -219,7 +268,7 @@ def run_server( """ command = ' '.join([ 'python -m hathor run_node', - '--memory-storage', + '--temp-data', '--wallet hd', '--wallet-enable-api', '--hostname {}'.format(hostname), @@ -411,6 +460,7 @@ def create_tokens(manager: 'HathorManager', address_b58: Optional[str] = None, m change_output: Optional[TxOutput] parents: list[bytes] + timestamp: int | None = None if use_genesis: genesis_hash = genesis_block.hash assert genesis_hash is not None @@ -425,6 +475,7 @@ def create_tokens(manager: 'HathorManager', address_b58: Optional[str] = None, m block = add_new_block(manager, advance_clock=1, address=address) deposit_input.append(TxInput(block.hash, 0, b'')) total_reward += block.outputs[0].value + timestamp = block.timestamp + 1 if total_reward > deposit_amount: 
change_output = TxOutput(total_reward - deposit_amount, script, 0) @@ -432,7 +483,7 @@ def create_tokens(manager: 'HathorManager', address_b58: Optional[str] = None, m change_output = None add_blocks_unlock_reward(manager) - timestamp = int(manager.reactor.seconds()) + assert timestamp is not None parents = manager.get_new_tx_parents(timestamp) outputs = [] @@ -472,7 +523,7 @@ def create_tokens(manager: 'HathorManager', address_b58: Optional[str] = None, m manager.cpu_mining_service.resolve(tx) if propagate: - manager.propagate_tx(tx, fails_silently=False) + manager.propagate_tx(tx) assert isinstance(manager.reactor, Clock) manager.reactor.advance(8) return tx @@ -560,8 +611,9 @@ def add_tx_with_data_script(manager: 'HathorManager', data: list[str], propagate manager.cpu_mining_service.resolve(tx) if propagate: - manager.verification_service.verify(tx) - manager.propagate_tx(tx, fails_silently=False) + params = VerificationParams.default_for_mempool() + manager.verification_service.verify(tx, params) + manager.propagate_tx(tx) assert isinstance(manager.reactor, Clock) manager.reactor.advance(8) @@ -582,6 +634,7 @@ class EventMocker: inputs=[], outputs=[], parents=[], + headers=[], tokens=[], metadata=TxMetadata( hash='abc', diff --git a/tests/wallet/test_wallet.py b/tests/wallet/test_wallet.py index 48d1890b8..73d175680 100644 --- a/tests/wallet/test_wallet.py +++ b/tests/wallet/test_wallet.py @@ -207,7 +207,7 @@ def test_create_token_transaction(self): tx2.parents = self.manager.get_new_tx_parents() self.manager.cpu_mining_service.resolve(tx2) tx2.init_static_metadata_from_storage(self._settings, self.manager.tx_storage) - self.manager.verification_service.verify(tx2) + self.manager.verification_service.verify(tx2, self.verification_params) self.assertNotEqual(len(tx2.inputs), 0) token_dict = defaultdict(int) @@ -265,6 +265,6 @@ def test_maybe_spent_txs(self): tx2.weight = 1 tx2.timestamp = blocks[-1].timestamp + 1 self.manager.cpu_mining_service.resolve(tx2) - 
self.assertTrue(self.manager.on_new_tx(tx2, fails_silently=False)) + self.assertTrue(self.manager.on_new_tx(tx2)) self.clock.advance(2) self.assertEqual(0, len(w.maybe_spent_txs[self._settings.HATHOR_TOKEN_UID])) diff --git a/tests/wallet/test_wallet_hd.py b/tests/wallet/test_wallet_hd.py index 398b3767b..e973f89a8 100644 --- a/tests/wallet/test_wallet_hd.py +++ b/tests/wallet/test_wallet_hd.py @@ -27,7 +27,7 @@ def test_transaction_and_balance(self): new_address = self.wallet.get_unused_address() out = WalletOutputInfo(decode_address(new_address), self.TOKENS, timelock=None) block = add_new_block(self.manager) - self.manager.verification_service.verify(block) + self.manager.verification_service.verify(block, self.verification_params) utxo = self.wallet.unspent_txs[self._settings.HATHOR_TOKEN_UID].get((block.hash, 0)) self.assertIsNotNone(utxo) self.assertEqual(self.wallet.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(0, self.BLOCK_TOKENS))