diff --git a/.github/actions/setup-hathor-env/action.yml b/.github/actions/setup-hathor-env/action.yml index 2cc18ece9..5cb82ced3 100644 --- a/.github/actions/setup-hathor-env/action.yml +++ b/.github/actions/setup-hathor-env/action.yml @@ -30,7 +30,8 @@ runs: run: | brew cleanup -q # brew update -q - brew install -q graphviz rocksdb pkg-config + brew install -q graphviz rocksdb pkg-config openssl + echo "PYCOIN_LIBCRYPTO_PATH=$(brew --prefix openssl)/lib/libcrypto.dylib" >> $GITHUB_ENV shell: bash - name: Install Poetry dependencies diff --git a/.github/workflows/lib.yml b/.github/workflows/lib.yml new file mode 100644 index 000000000..06ed721bf --- /dev/null +++ b/.github/workflows/lib.yml @@ -0,0 +1,48 @@ +# yamllint disable rule:line-length +name: lib_tests +on: # yamllint disable-line rule:truthy + push: + branches: + - master + - dev + tags: + - v* + pull_request: + branches: + - master + - dev +jobs: + test: + name: python-${{ matrix.python }} (${{ matrix.os }}) + runs-on: ${{ matrix.os }} + defaults: + run: + working-directory: ./hathorlib + timeout-minutes: 40 # default is 360 + strategy: + matrix: + python: + - "3.11" + - "3.12" + os: + - ubuntu-22.04 + - macos-15 + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python }} + - name: Install Poetry + run: pip install 'poetry<2' + - name: Install Poetry dependencies + run: poetry install -n --no-root -E client + - name: Run linters + run: poetry run make check + - name: Run tests + run: poetry run make tests + continue-on-error: ${{ matrix.tier > 1 }} + - name: Upload coverage + uses: codecov/codecov-action@29386c70ef20e286228c72b668a06fd0e8399192 # https://github.com/codecov/codecov-action/releases/tag/v1 + if: matrix.python == 3.11 && startsWith(matrix.os, 'ubuntu') diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 687ec9317..2e3b539d4 100644 --- 
a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -23,9 +23,9 @@ jobs: import os import json full_matrix = { - 'python': ['3.11', '3.12'], + 'python': ['3.11', '3.12', '3.13'], # available OS's: https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idruns-on - 'os': ['ubuntu-22.04', 'macos-13'], + 'os': ['ubuntu-22.04', 'macos-15'], } # this is the fastest one: reduced_matrix = { diff --git a/.yamllint.yml b/.yamllint.yml index 077d1e328..880ef028f 100644 --- a/.yamllint.yml +++ b/.yamllint.yml @@ -1,5 +1,8 @@ extends: default +ignore: + - .venv/ + rules: document-start: disable line-length: diff --git a/Dockerfile b/Dockerfile index 763a28703..2eaa39e96 100644 --- a/Dockerfile +++ b/Dockerfile @@ -16,6 +16,7 @@ RUN pip --no-input --no-cache-dir install --upgrade pip wheel poetry ENV POETRY_VIRTUALENVS_IN_PROJECT=true WORKDIR /app/ COPY pyproject.toml poetry.lock ./ +COPY hathorlib ./hathorlib RUN poetry install -n -E sentry --no-root --only=main COPY hathor ./hathor COPY hathor_cli ./hathor_cli diff --git a/extras/github/docker.py b/extras/github/docker.py index ddc7bcadd..e9750f2da 100644 --- a/extras/github/docker.py +++ b/extras/github/docker.py @@ -136,7 +136,7 @@ def extract_pyver(filename): output['tags'] = 'dont-push--local-only' output['push'] = 'false' - output['created'] = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ') + output['created'] = datetime.datetime.now(datetime.UTC).strftime('%Y-%m-%dT%H:%M:%SZ') output['dockerfile'] = dockerfile return output diff --git a/hathor/__init__.py b/hathor/__init__.py index 9ae995799..aa6b35d5a 100644 --- a/hathor/__init__.py +++ b/hathor/__init__.py @@ -39,7 +39,7 @@ TxOutputScript, VertexId, ) -from hathor.nanocontracts.utils import sha3, verify_ecdsa +from hathor.nanocontracts.utils import json_dumps, sha3, verify_ecdsa from hathor.version import __version__ __all__ = [ @@ -73,5 +73,6 @@ 'VertexId', 'sha3', 'verify_ecdsa', + 'json_dumps', 
'__version__', ] diff --git a/hathor/_openapi/openapi_base.json b/hathor/_openapi/openapi_base.json index 51e0fc372..fda1d1c46 100644 --- a/hathor/_openapi/openapi_base.json +++ b/hathor/_openapi/openapi_base.json @@ -7,7 +7,7 @@ ], "info": { "title": "Hathor API", - "version": "0.68.4" + "version": "0.69.0" }, "consumes": [ "application/json" diff --git a/hathor/api_util.py b/hathor/api_util.py index b31e6acfa..0c663d820 100644 --- a/hathor/api_util.py +++ b/hathor/api_util.py @@ -152,7 +152,7 @@ def get_arg_default(args: dict[bytes, list[bytes]], key: str, default: T) -> T: bkey = key.encode() values = args.get(bkey) if not values: - return cast(T, default) + return default value: bytes = values[0] if isinstance(default, int): return cast(T, int(value)) diff --git a/hathor/builder/builder.py b/hathor/builder/builder.py index 860feafee..e15e8c914 100644 --- a/hathor/builder/builder.py +++ b/hathor/builder/builder.py @@ -46,7 +46,8 @@ from hathor.storage import RocksDBStorage from hathor.stratum import StratumFactory from hathor.transaction.json_serializer import VertexJsonSerializer -from hathor.transaction.storage import TransactionCacheStorage, TransactionRocksDBStorage, TransactionStorage +from hathor.transaction.storage import TransactionRocksDBStorage, TransactionStorage +from hathor.transaction.storage.rocksdb_storage import CacheConfig from hathor.transaction.vertex_children import RocksDBVertexChildrenService from hathor.transaction.vertex_parser import VertexParser from hathor.util import Random, get_environment_info @@ -102,7 +103,7 @@ class BuildArtifacts(NamedTuple): tx_storage: TransactionStorage feature_service: FeatureService bit_signaling_service: BitSignalingService - indexes: Optional[IndexesManager] + indexes: IndexesManager wallet: Optional[BaseWallet] rocksdb_storage: RocksDBStorage stratum_factory: Optional[StratumFactory] @@ -410,18 +411,17 @@ def _get_or_create_nc_log_storage(self) -> NCLogStorage: def _get_or_create_consensus(self) -> 
ConsensusAlgorithm: if self._consensus is None: soft_voided_tx_ids = self._get_soft_voided_tx_ids() - pubsub = self._get_or_create_pubsub() nc_storage_factory = self._get_or_create_nc_storage_factory() nc_calls_sorter = self._get_nc_calls_sorter() self._consensus = ConsensusAlgorithm( nc_storage_factory=nc_storage_factory, soft_voided_tx_ids=soft_voided_tx_ids, - pubsub=pubsub, settings=self._get_or_create_settings(), runner_factory=self._get_or_create_runner_factory(), nc_log_storage=self._get_or_create_nc_log_storage(), nc_calls_sorter=nc_calls_sorter, feature_service=self._get_or_create_feature_service(), + tx_storage=self._get_or_create_tx_storage(), ) return self._consensus @@ -506,38 +506,27 @@ def _get_or_create_tx_storage(self) -> TransactionStorage: self._tx_storage.indexes = indexes return self._tx_storage - store_indexes: Optional[IndexesManager] = indexes + cache_config: CacheConfig | None = None if self._tx_storage_cache: - store_indexes = None + cache_config = CacheConfig() + if self._tx_storage_cache_capacity is not None: + cache_config.capacity = self._tx_storage_cache_capacity rocksdb_storage = self._get_or_create_rocksdb_storage() nc_storage_factory = self._get_or_create_nc_storage_factory() vertex_parser = self._get_or_create_vertex_parser() vertex_children_service = RocksDBVertexChildrenService(rocksdb_storage) self._tx_storage = TransactionRocksDBStorage( - rocksdb_storage, - indexes=store_indexes, + reactor=self._get_reactor(), + rocksdb_storage=rocksdb_storage, + indexes=indexes, settings=settings, vertex_parser=vertex_parser, nc_storage_factory=nc_storage_factory, vertex_children_service=vertex_children_service, + cache_config=cache_config, ) - if self._tx_storage_cache: - reactor = self._get_reactor() - kwargs: dict[str, Any] = {} - if self._tx_storage_cache_capacity is not None: - kwargs['capacity'] = self._tx_storage_cache_capacity - self._tx_storage = TransactionCacheStorage( - self._tx_storage, - reactor, - indexes=indexes, - 
settings=settings, - nc_storage_factory=nc_storage_factory, - vertex_children_service=vertex_children_service, - **kwargs - ) - return self._tx_storage def _get_or_create_event_storage(self) -> EventStorage: @@ -666,7 +655,6 @@ def _get_or_create_vertex_handler(self) -> VertexHandler: feature_service=self._get_or_create_feature_service(), execution_manager=self._get_or_create_execution_manager(), pubsub=self._get_or_create_pubsub(), - wallet=self._get_or_create_wallet(), ) return self._vertex_handler @@ -794,9 +782,8 @@ def enable_event_queue(self) -> 'Builder': def set_tx_storage(self, tx_storage: TransactionStorage) -> 'Builder': self.check_if_can_modify() self._tx_storage = tx_storage - internal = tx_storage.store if isinstance(tx_storage, TransactionCacheStorage) else tx_storage - assert isinstance(internal, TransactionRocksDBStorage) - self._rocksdb_storage = internal._rocksdb_storage + assert isinstance(tx_storage, TransactionRocksDBStorage) + self._rocksdb_storage = tx_storage._rocksdb_storage return self def set_event_storage(self, event_storage: EventStorage) -> 'Builder': diff --git a/hathor/builder/resources_builder.py b/hathor/builder/resources_builder.py index 23ee34fc2..7c5dde482 100644 --- a/hathor/builder/resources_builder.py +++ b/hathor/builder/resources_builder.py @@ -315,7 +315,6 @@ def create_resources(self) -> server.Site: parent.putChild(url_path, resource) # Websocket resource - assert self.manager.tx_storage.indexes is not None ws_factory = HathorAdminWebsocketFactory(manager=self.manager, metrics=self.manager.metrics, address_index=self.manager.tx_storage.indexes.addresses) diff --git a/hathor/conf/mainnet.py b/hathor/conf/mainnet.py index 301a2659e..ed4242a0e 100644 --- a/hathor/conf/mainnet.py +++ b/hathor/conf/mainnet.py @@ -13,7 +13,7 @@ # limitations under the License. 
from hathor.checkpoint import Checkpoint as cp -from hathor.conf.settings import HathorSettings, NanoContractsSetting +from hathor.conf.settings import FeatureSetting, HathorSettings from hathor.feature_activation.feature import Feature from hathor.feature_activation.model.criteria import Criteria from hathor.feature_activation.settings import Settings as FeatureActivationSettings @@ -213,7 +213,8 @@ '00004305882eb3eef6b45f025ff58eb7baa5ca35f7d6f42c8b085482b00474e6', '000045ecbab77c9a8d819ff6d26893b9da2774eee5539f17d8fc2394f82b758e', ])), - ENABLE_NANO_CONTRACTS=NanoContractsSetting.FEATURE_ACTIVATION, + ENABLE_NANO_CONTRACTS=FeatureSetting.FEATURE_ACTIVATION, + ENABLE_FEE_BASED_TOKENS=FeatureSetting.DISABLED, NC_ON_CHAIN_BLUEPRINT_ALLOWED_ADDRESSES=[ 'HDkKGHwDHTuUGbhET73XdTJZkS8uU7PHf9', 'HUbxYhtqW8pdRCC2WngPxN7MB4SUMDPrrh', @@ -256,6 +257,26 @@ version='0.67.0', signal_support_by_default=True, ), + Feature.FEE_TOKENS: Criteria( + # XXX: parity with hathor/conf/mainnet.yml + bit=2, + start_height=6_249_600, + timeout_height=6_592_320, + minimum_activation_height=6_350_400, + lock_in_on_timeout=False, + version='0.69.0', + signal_support_by_default=True, + ), + Feature.OPCODES_V2: Criteria( + # XXX: parity with hathor/conf/mainnet.yml + bit=3, + start_height=6_249_600, + timeout_height=6_592_320, + minimum_activation_height=6_350_400, + lock_in_on_timeout=False, + version='0.69.0', + signal_support_by_default=True, + ), } ) ) diff --git a/hathor/conf/mainnet.yml b/hathor/conf/mainnet.yml index 2428d95e5..6f1080b1e 100644 --- a/hathor/conf/mainnet.yml +++ b/hathor/conf/mainnet.yml @@ -230,6 +230,26 @@ FEATURE_ACTIVATION: version: 0.67.0 signal_support_by_default: true + FEE_TOKENS: + bit: 2 + # Right now the best block is 6_248_703 at Tuesday, 2026-01-27 22:45:36 GMT + start_height: 6_249_600 # expected around 2026-02-04 06:14:06 GMT + timeout_height: 6_592_320 # 16 weeks, expected around 2026-05-27 06:14:06 GMT + minimum_activation_height: 6_350_400 # 4 weeks, 
expected around 2026-03-04 06:14:06 GMT + lock_in_on_timeout: false + version: 0.69.0 + signal_support_by_default: true + + OPCODES_V2: + bit: 3 + # Right now the best block is 6_248_703 at Tuesday, 2026-01-27 21:45:36 GMT + start_height: 6_249_600 # expected around 2026-02-04 06:14:06 GMT + timeout_height: 6_592_320 # 16 weeks, expected around 2026-05-27 06:14:06 GMT + minimum_activation_height: 6_350_400 # 4 weeks, expected around 2026-03-04 06:14:06 GMT + lock_in_on_timeout: false + version: 0.69.0 + signal_support_by_default: true + ENABLE_NANO_CONTRACTS: feature_activation NC_ON_CHAIN_BLUEPRINT_ALLOWED_ADDRESSES: - HDkKGHwDHTuUGbhET73XdTJZkS8uU7PHf9 diff --git a/hathor/conf/settings.py b/hathor/conf/settings.py index fadcb3f52..d135e0881 100644 --- a/hathor/conf/settings.py +++ b/hathor/conf/settings.py @@ -35,8 +35,8 @@ @unique -class NanoContractsSetting(StrEnum): - """Enum to configure the state of the Nano Contracts feature.""" +class FeatureSetting(StrEnum): + """Enum to configure the state of a feature.""" # Completely disabled. DISABLED = auto() @@ -49,14 +49,14 @@ class NanoContractsSetting(StrEnum): def __bool__(self) -> bool: """ - >>> bool(NanoContractsSetting.DISABLED) + >>> bool(FeatureSetting.DISABLED) False - >>> bool(NanoContractsSetting.ENABLED) + >>> bool(FeatureSetting.ENABLED) True - >>> bool(NanoContractsSetting.FEATURE_ACTIVATION) + >>> bool(FeatureSetting.FEATURE_ACTIVATION) True """ - return self in (NanoContractsSetting.ENABLED, NanoContractsSetting.FEATURE_ACTIVATION) + return self in (FeatureSetting.ENABLED, FeatureSetting.FEATURE_ACTIVATION) class HathorSettings(NamedTuple): @@ -480,7 +480,13 @@ def GENESIS_TX2_TIMESTAMP(self) -> int: MAX_UNVERIFIED_PEERS_PER_CONN: int = 100 # Used to enable nano contracts. - ENABLE_NANO_CONTRACTS: NanoContractsSetting = NanoContractsSetting.DISABLED + ENABLE_NANO_CONTRACTS: FeatureSetting = FeatureSetting.DISABLED + + # Used to enable fee-based tokens. 
+ ENABLE_FEE_BASED_TOKENS: FeatureSetting = FeatureSetting.DISABLED + + # Used to enable opcodes V2. + ENABLE_OPCODES_V2: FeatureSetting = FeatureSetting.DISABLED # List of enabled blueprints. BLUEPRINTS: dict[bytes, str] = {} diff --git a/hathor/conf/testnet.yml b/hathor/conf/testnet.yml index ac18ce7ea..25903a5c7 100644 --- a/hathor/conf/testnet.yml +++ b/hathor/conf/testnet.yml @@ -41,13 +41,46 @@ FEATURE_ACTIVATION: version: 0.67.0 signal_support_by_default: true + # current height: 295_587 at 2026-01-20 19:45:03 UTC + FEE_TOKENS: + bit: 1 + start_height: 295_680 # expected around 2026-01-20 20:31:33 UTC + timeout_height: 304_320 # 36 evaluation periods (72 hours) + minimum_activation_height: 0 + lock_in_on_timeout: false + version: 0.69.0 + signal_support_by_default: true + + # current height: 335_348 at 2026-02-03 17:12:47 GMT + OPCODES_V2: + bit: 0 + start_height: 335_520 # expected around 2026-02-03 18:38:47 GMT + timeout_height: 347_040 # 48 evaluation periods (96 hours) + minimum_activation_height: 0 + lock_in_on_timeout: false + version: 0.69.0 + signal_support_by_default: true + ENABLE_NANO_CONTRACTS: feature_activation +ENABLE_FEE_BASED_TOKENS: feature_activation NC_ON_CHAIN_BLUEPRINT_ALLOWED_ADDRESSES: - WWFiNeWAFSmgtjm4ht2MydwS5GY3kMJsEK - WQFDxic8xWWnMLL4aE5abY2XRKPNvGhtjY SOFT_VOIDED_TX_IDS: - 00000000e03c4aa950f87cb2c0317f0e5fbd58561ad9a30916126426dcd5f283 + - 000002681b995cb043583f5a747c185d4db7b1ee6621ad75fc155c9e8ac12846 + - 00000f69311fc345b9a8919282ef85da5adfaf10690568ed3fbc290de218a056 + - 00000eea02e10b227ce1635a73714c4bcca207605d3d38fd44d0e8a26f4dc3f5 + - 00000f5bda0cbfa1fbc05b021c7b7be8423bdc1663e83cef02c9db7dc9e2a664 + - 0000065da27407862eb0fc5befc0611ce0b41120c3ef2b55ff51a71ec989028d + - 00000c14a7f11318bc362b58ad018e01f56f8c63def4e6bcda0555f6cef2ebc3 SKIP_VERIFICATION: - 00000000e03c4aa950f87cb2c0317f0e5fbd58561ad9a30916126426dcd5f283 + - 000002681b995cb043583f5a747c185d4db7b1ee6621ad75fc155c9e8ac12846 + - 
00000f69311fc345b9a8919282ef85da5adfaf10690568ed3fbc290de218a056 + - 00000eea02e10b227ce1635a73714c4bcca207605d3d38fd44d0e8a26f4dc3f5 + - 00000f5bda0cbfa1fbc05b021c7b7be8423bdc1663e83cef02c9db7dc9e2a664 + - 0000065da27407862eb0fc5befc0611ce0b41120c3ef2b55ff51a71ec989028d + - 00000c14a7f11318bc362b58ad018e01f56f8c63def4e6bcda0555f6cef2ebc3 diff --git a/hathor/conf/unittests.yml b/hathor/conf/unittests.yml index ebc6a3e1b..ce6d870b5 100644 --- a/hathor/conf/unittests.yml +++ b/hathor/conf/unittests.yml @@ -24,6 +24,7 @@ FEATURE_ACTIVATION: default_threshold: 3 ENABLE_NANO_CONTRACTS: enabled +ENABLE_FEE_BASED_TOKENS: enabled NC_ON_CHAIN_BLUEPRINT_ALLOWED_ADDRESSES: # keypair wallet: diff --git a/hathor/consensus/block_consensus.py b/hathor/consensus/block_consensus.py index 53b3e4feb..27687acf3 100644 --- a/hathor/consensus/block_consensus.py +++ b/hathor/consensus/block_consensus.py @@ -14,20 +14,16 @@ from __future__ import annotations -import hashlib -import traceback from itertools import chain from typing import TYPE_CHECKING, Any, Iterable, Optional from structlog import get_logger -from typing_extensions import assert_never from hathor.consensus.context import ReorgInfo -from hathor.feature_activation.feature import Feature +from hathor.execution_manager import non_critical_code +from hathor.feature_activation.utils import Features from hathor.transaction import BaseTransaction, Block, Transaction -from hathor.transaction.exceptions import TokenNotFound from hathor.transaction.nc_execution_state import NCExecutionState -from hathor.transaction.types import MetaNCCallRecord from hathor.util import classproperty from hathor.utils.weight import weight_to_work @@ -35,10 +31,8 @@ from hathor.conf.settings import HathorSettings from hathor.consensus.context import ConsensusAlgorithmContext from hathor.feature_activation.feature_service import FeatureService + from hathor.nanocontracts.execution import NCBlockExecutor from hathor.nanocontracts.nc_exec_logs import 
NCLogStorage - from hathor.nanocontracts.runner import Runner - from hathor.nanocontracts.runner.runner import RunnerFactory - from hathor.nanocontracts.storage import NCBlockStorage logger = get_logger() @@ -50,20 +44,15 @@ class BlockConsensusAlgorithm: def __init__( self, - settings: HathorSettings, + settings: 'HathorSettings', context: 'ConsensusAlgorithmContext', - runner_factory: RunnerFactory, - nc_log_storage: NCLogStorage, - feature_service: FeatureService, - *, - nc_exec_fail_trace: bool = False, + block_executor: 'NCBlockExecutor', + feature_service: 'FeatureService', ) -> None: self._settings = settings self.context = context - self._runner_factory = runner_factory - self._nc_log_storage = nc_log_storage + self._block_executor = block_executor self.feature_service = feature_service - self.nc_exec_fail_trace = nc_exec_fail_trace @classproperty def log(cls) -> Any: @@ -85,276 +74,25 @@ def update_consensus(self, block: Block) -> None: def _nc_initialize_empty(self, block: Block) -> None: """Initialize a block with an empty contract trie.""" - meta = block.get_metadata() - block_storage = self.context.consensus.nc_storage_factory.get_empty_block_storage() - block_storage.commit() - if meta.nc_block_root_id is not None: - assert meta.nc_block_root_id == block_storage.get_root_id() - else: - meta.nc_block_root_id = block_storage.get_root_id() - self.context.save(block) + self._block_executor.initialize_empty(block, self.context) def execute_nano_contracts(self, block: Block) -> None: """Execute the method calls for transactions confirmed by this block handling reorgs.""" - # If we reach this point, Nano Contracts must be enabled. - assert self._settings.ENABLE_NANO_CONTRACTS - assert not block.is_genesis - - meta = block.get_metadata() - if meta.voided_by: - # If the block is voided, skip execution. 
- return - - assert meta.nc_block_root_id is None - - to_be_executed: list[Block] = [] - is_reorg: bool = False - if self.context.reorg_info: - # handle reorgs - is_reorg = True - cur = block - # XXX We could stop when `cur_meta.nc_block_root_id is not None` but - # first we need to refactor meta.first_block and meta.voided_by to - # have different values per block. - while cur != self.context.reorg_info.common_block: - cur_meta = cur.get_metadata() - if cur_meta.nc_block_root_id is not None: - # Reset nc_block_root_id to force re-execution. - cur_meta.nc_block_root_id = None - to_be_executed.append(cur) - cur = cur.get_block_parent() - else: - # No reorg occurred, so we execute all unexecuted blocks. - # Normally it's just the current block, but it's possible to have - # voided and therefore unexecuted blocks connected to the best chain, - # for example when a block is voided by a transaction. - cur = block - while True: - cur_meta = cur.get_metadata() - if cur_meta.nc_block_root_id is not None: - break - to_be_executed.append(cur) - if cur.is_genesis: - break - cur = cur.get_block_parent() - - for current in to_be_executed[::-1]: - self._nc_execute_calls(current, is_reorg=is_reorg) + self._block_executor.execute_chain( + block, + self.context, + on_failure=self.mark_as_nc_fail_execution, + ) def _should_execute_nano(self, block: Block) -> bool: """ Determine whether we should proceed to execute Nano transactions while making the necessary initializations. 
""" - from hathor.conf.settings import NanoContractsSetting assert not block.is_genesis - match self._settings.ENABLE_NANO_CONTRACTS: - case NanoContractsSetting.ENABLED: - return True - - case NanoContractsSetting.FEATURE_ACTIVATION: - parent = block.get_block_parent() - is_active_on_parent = self.feature_service.is_feature_active( - vertex=parent, - feature=Feature.NANO_CONTRACTS, - ) - return is_active_on_parent - - case NanoContractsSetting.DISABLED: - return False - - case _: # pragma: no cover - assert_never(self._settings.ENABLE_NANO_CONTRACTS) - - def _nc_execute_calls(self, block: Block, *, is_reorg: bool) -> None: - """Internal method to execute the method calls for transactions confirmed by this block. - """ - from hathor.nanocontracts import NC_EXECUTION_FAIL_ID, NCFail - from hathor.nanocontracts.types import Address - - assert self._settings.ENABLE_NANO_CONTRACTS - - if block.is_genesis: - # XXX We can remove this call after the full node initialization is refactored and - # the genesis block goes through the consensus protocol. - self._nc_initialize_empty(block) - return - - meta = block.get_metadata() - assert not meta.voided_by - assert meta.nc_block_root_id is None - parent = block.get_block_parent() - parent_meta = parent.get_metadata() - block_root_id = parent_meta.nc_block_root_id - assert block_root_id is not None - - nc_calls: list[Transaction] = [] - for tx in block.iter_transactions_in_this_block(): - if not tx.is_nano_contract(): - # Skip other type of transactions. - continue - tx_meta = tx.get_metadata() - if is_reorg: - assert self.context.reorg_info is not None - # Clear the NC_EXECUTION_FAIL_ID flag if this is the only reason the transaction was voided. - # This case might only happen when handling reorgs. 
- assert tx.storage is not None - if tx_meta.voided_by == {tx.hash, NC_EXECUTION_FAIL_ID}: - if tx_meta.conflict_with: - for tx_conflict_id in tx_meta.conflict_with: - tx_conflict = tx.storage.get_transaction(tx_conflict_id) - tx_conflict_meta = tx_conflict.get_metadata() - assert tx_conflict_meta.first_block is None - assert tx_conflict_meta.voided_by - self.context.transaction_algorithm.remove_voided_by(tx, tx.hash) - tx_meta.voided_by = None - self.context.save(tx) - tx_meta.nc_execution = NCExecutionState.PENDING - nc_calls.append(tx) - - if not nc_calls: - meta.nc_block_root_id = block_root_id - self.context.save(block) - return - - nc_sorted_calls = self.context.consensus.nc_calls_sorter(block, nc_calls) - block_storage = self.context.consensus.nc_storage_factory.get_block_storage(block_root_id) - seed_hasher = hashlib.sha256(block.hash) - - for tx in nc_sorted_calls: - seed_hasher.update(tx.hash) - seed_hasher.update(block_storage.get_root_id()) - - tx_meta = tx.get_metadata() - if tx_meta.voided_by: - # Skip voided transactions. This might happen if a previous tx in nc_calls fails and - # mark this tx as voided. - tx_meta.nc_execution = NCExecutionState.SKIPPED - self.context.save(tx) - # Update seqnum even for skipped nano transactions. 
- nc_header = tx.get_nano_header() - seqnum = block_storage.get_address_seqnum(Address(nc_header.nc_address)) - if nc_header.nc_seqnum > seqnum: - block_storage.set_address_seqnum(Address(nc_header.nc_address), nc_header.nc_seqnum) - continue - - runner = self._runner_factory.create(block_storage=block_storage, seed=seed_hasher.digest()) - exception_and_tb: tuple[NCFail, str] | None = None - token_dict = tx.get_complete_token_info(block_storage) - should_verify_sum_after_execution = any(token_info.version is None for token_info in token_dict.values()) - - try: - runner.execute_from_tx(tx) - - # after the execution we have the latest state in the storage - # and at this point no tokens pending creation - if should_verify_sum_after_execution: - self._verify_sum_after_execution(tx, block_storage) - - except NCFail as e: - kwargs: dict[str, Any] = {} - if tx.name: - kwargs['__name'] = tx.name - if self.nc_exec_fail_trace: - kwargs['exc_info'] = True - self.log.info( - 'nc execution failed', - tx=tx.hash.hex(), - error=repr(e), - cause=repr(e.__cause__), - **kwargs, - ) - exception_and_tb = e, traceback.format_exc() - self.mark_as_nc_fail_execution(tx) - else: - tx_meta.nc_execution = NCExecutionState.SUCCESS - self.context.save(tx) - # TODO Avoid calling multiple commits for the same contract. The best would be to call the commit - # method once per contract per block, just like we do for the block_storage. This ensures we will - # have a clean database with no orphan nodes. - runner.commit() - - # Update metadata. - self.nc_update_metadata(tx, runner) - - # Update indexes. This must be after metadata is updated. - assert tx.storage is not None - assert tx.storage.indexes is not None - tx.storage.indexes.handle_contract_execution(tx) - - # Pubsub event to indicate execution success - self.context.nc_exec_success.append(tx) - - # We only emit events when the nc is successfully executed. 
- assert self.context.nc_events is not None - last_call_info = runner.get_last_call_info() - events_list = last_call_info.nc_logger.__events__ - self.context.nc_events.append((tx, events_list)) - - # Store events in transaction metadata - if events_list: - tx_meta.nc_events = [(event.nc_id, event.data) for event in events_list] - self.context.save(tx) - finally: - # We save logs regardless of whether the nc successfully executed. - self._nc_log_storage.save_logs(tx, runner.get_last_call_info(), exception_and_tb) - - # Save block state root id. If nothing happens, it should be the same as its block parent. - block_storage.commit() - assert block_storage.get_root_id() is not None - meta.nc_block_root_id = block_storage.get_root_id() - self.context.save(block) - - for tx in nc_calls: - tx_meta = tx.get_metadata() - assert tx_meta.nc_execution is not None - self.log.info('nano tx execution status', - blk=block.hash.hex(), - tx=tx.hash.hex(), - execution=tx_meta.nc_execution.value) - match tx_meta.nc_execution: - case NCExecutionState.PENDING: # pragma: no cover - assert False, 'unexpected pending state' # should never happen - case NCExecutionState.SUCCESS: - assert tx_meta.voided_by is None - case NCExecutionState.FAILURE: - assert tx_meta.voided_by == {tx.hash, NC_EXECUTION_FAIL_ID} - case NCExecutionState.SKIPPED: - assert tx_meta.voided_by - assert NC_EXECUTION_FAIL_ID not in tx_meta.voided_by - case _: # pragma: no cover - assert_never(tx_meta.nc_execution) - - def _verify_sum_after_execution(self, tx: Transaction, block_storage: NCBlockStorage) -> None: - from hathor import NCFail - from hathor.verification.transaction_verifier import TransactionVerifier - try: - token_dict = tx.get_complete_token_info(block_storage) - TransactionVerifier.verify_sum(self._settings, token_dict) - except TokenNotFound as e: - # At this point, any nonexistent token would have made a prior validation fail. 
For example, if there - # was a withdrawal of a nonexistent token, it would have failed in the balance validation before. - raise AssertionError from e - except Exception as e: - raise NCFail from e - - def nc_update_metadata(self, tx: Transaction, runner: 'Runner') -> None: - from hathor.nanocontracts.runner.call_info import CallType - - meta = tx.get_metadata() - assert meta.nc_execution == NCExecutionState.SUCCESS - call_info = runner.get_last_call_info() - assert call_info.calls is not None - nc_calls = [ - MetaNCCallRecord.from_call_record(call) - for call in call_info.calls if call.type == CallType.PUBLIC - ] - - # Update metadata. - assert meta.nc_calls is None - meta.nc_calls = nc_calls - self.context.save(tx) + features = Features.from_vertex(settings=self._settings, feature_service=self.feature_service, vertex=parent) + return features.nanocontracts def mark_as_nc_fail_execution(self, tx: Transaction) -> None: """Mark that a transaction failed execution. It also propagates its voidedness through the DAG of funds.""" @@ -426,9 +164,7 @@ def update_voided_info(self, block: Block) -> None: return assert block.storage is not None - storage = block.storage - assert storage.indexes is not None # Union of voided_by of parents voided_by: set[bytes] = self.union_voided_by_from_parents(block) @@ -530,7 +266,6 @@ def mark_as_reorg_if_needed(self, common_block: Block, new_best_block: Block) -> """Mark as reorg only if reorg size > 0.""" assert new_best_block.storage is not None storage = new_best_block.storage - assert storage.indexes is not None _, old_best_block_hash = storage.indexes.height.get_height_tip() old_best_block = storage.get_transaction(old_best_block_hash) assert isinstance(old_best_block, Block) @@ -583,7 +318,9 @@ def update_voided_by_from_parents(self, block: Block) -> bool: else: meta.voided_by = voided_by.copy() self.context.save(block) - block.storage.del_from_indexes(block, relax_assert=True) + 
block.storage.indexes.del_from_critical_indexes(block) + with non_critical_code(self.log): + block.storage.indexes.del_from_non_critical_indexes(block) return True return False @@ -747,19 +484,19 @@ def remove_first_block_markers(self, block: Block) -> None: bfs = BFSTimestampWalk(storage, is_dag_verifications=True, is_dag_funds=True, is_left_to_right=False) for tx in bfs.run(block, skip_root=True): if tx.is_block: - bfs.skip_neighbors(tx) + bfs.skip_neighbors() continue meta = tx.get_metadata() if meta.first_block != block.hash: - bfs.skip_neighbors(tx) + bfs.skip_neighbors() continue if tx.is_nano_contract(): if meta.nc_execution == NCExecutionState.SUCCESS: assert tx.storage is not None - assert tx.storage.indexes is not None - tx.storage.indexes.handle_contract_unexecution(tx) + with non_critical_code(self.log): + tx.storage.indexes.non_critical_handle_contract_unexecution(tx) meta.nc_execution = NCExecutionState.PENDING meta.nc_calls = None meta.nc_events = None @@ -770,6 +507,7 @@ def remove_first_block_markers(self, block: Block) -> None: meta.voided_by = None meta.first_block = None self.context.save(tx) + bfs.add_neighbors() def _score_block_dfs(self, block: BaseTransaction, used: set[bytes], mark_as_best_chain: bool, newest_timestamp: int) -> int: @@ -798,11 +536,11 @@ def _score_block_dfs(self, block: BaseTransaction, used: set[bytes], for tx in bfs.run(parent, skip_root=False): assert tx.hash is not None if tx.is_block: - bfs.skip_neighbors(tx) + bfs.skip_neighbors() continue if tx.hash in used: - bfs.skip_neighbors(tx) + bfs.skip_neighbors() continue used.add(tx.hash) @@ -810,7 +548,7 @@ def _score_block_dfs(self, block: BaseTransaction, used: set[bytes], if meta.first_block: first_block = storage.get_transaction(meta.first_block) if first_block.timestamp <= newest_timestamp: - bfs.skip_neighbors(tx) + bfs.skip_neighbors() continue if mark_as_best_chain: @@ -819,6 +557,7 @@ def _score_block_dfs(self, block: BaseTransaction, used: set[bytes], 
self.context.save(tx) score += weight_to_work(tx.weight) + bfs.add_neighbors() # Always save the score when it is calculated. meta = block.get_metadata() @@ -857,28 +596,27 @@ def calculate_score(self, block: Block, *, mark_as_best_chain: bool = False) -> class BlockConsensusAlgorithmFactory: - __slots__ = ('settings', 'nc_log_storage', '_runner_factory', 'feature_service', 'nc_exec_fail_trace') + __slots__ = ('settings', 'block_executor', 'feature_service') def __init__( self, - settings: HathorSettings, - runner_factory: RunnerFactory, - nc_log_storage: NCLogStorage, - feature_service: FeatureService, - *, - nc_exec_fail_trace: bool = False, + settings: 'HathorSettings', + block_executor: 'NCBlockExecutor', + feature_service: 'FeatureService', ) -> None: self.settings = settings - self._runner_factory = runner_factory - self.nc_log_storage = nc_log_storage + self.block_executor = block_executor self.feature_service = feature_service - self.nc_exec_fail_trace = nc_exec_fail_trace + + @property + def nc_log_storage(self) -> 'NCLogStorage': + """Expose nc_log_storage for tests that need to access it.""" + return self.block_executor._nc_log_storage def __call__(self, context: 'ConsensusAlgorithmContext') -> BlockConsensusAlgorithm: return BlockConsensusAlgorithm( self.settings, context, - self._runner_factory, - self.nc_log_storage, + self.block_executor, self.feature_service, ) diff --git a/hathor/consensus/consensus.py b/hathor/consensus/consensus.py index 8c71e7861..dbf1b0fda 100644 --- a/hathor/consensus/consensus.py +++ b/hathor/consensus/consensus.py @@ -15,18 +15,24 @@ from __future__ import annotations from collections import defaultdict -from typing import TYPE_CHECKING, Callable +from dataclasses import dataclass +from typing import TYPE_CHECKING, Any, Callable, assert_never from structlog import get_logger from hathor.consensus.block_consensus import BlockConsensusAlgorithmFactory from hathor.consensus.context import ConsensusAlgorithmContext from 
hathor.consensus.transaction_consensus import TransactionConsensusAlgorithmFactory +from hathor.execution_manager import non_critical_code +from hathor.feature_activation.feature import Feature +from hathor.nanocontracts.exception import NCInvalidSignature +from hathor.nanocontracts.execution import NCBlockExecutor from hathor.profiler import get_cpu_profiler -from hathor.pubsub import HathorEvents, PubSubManager -from hathor.transaction import BaseTransaction, Transaction -from hathor.transaction.exceptions import RewardLocked +from hathor.pubsub import HathorEvents +from hathor.transaction import BaseTransaction, Block, Transaction +from hathor.transaction.exceptions import InvalidInputData, RewardLocked, TooManySigOps from hathor.util import not_none +from hathor.verification.verification_params import VerificationParams if TYPE_CHECKING: from hathor.conf.settings import HathorSettings @@ -43,6 +49,12 @@ _base_transaction_log = logger.new() +@dataclass(slots=True, frozen=True, kw_only=True) +class ConsensusEvent: + event: HathorEvents + kwargs: dict[str, Any] + + class ConsensusAlgorithm: """Execute the consensus algorithm marking blocks and transactions as either executed or voided. 
@@ -73,9 +85,9 @@ def __init__( self, nc_storage_factory: 'NCStorageFactory', soft_voided_tx_ids: set[bytes], - pubsub: PubSubManager, *, settings: HathorSettings, + tx_storage: TransactionStorage, runner_factory: RunnerFactory, nc_calls_sorter: NCSorterCallable, nc_log_storage: NCLogStorage, @@ -84,11 +96,22 @@ def __init__( ) -> None: self._settings = settings self.log = logger.new() - self._pubsub = pubsub + self.tx_storage = tx_storage self.nc_storage_factory = nc_storage_factory self.soft_voided_tx_ids = frozenset(soft_voided_tx_ids) + + # Create NCBlockExecutor with all NC-related dependencies + self._block_executor = NCBlockExecutor( + settings=settings, + runner_factory=runner_factory, + nc_storage_factory=nc_storage_factory, + nc_log_storage=nc_log_storage, + nc_calls_sorter=nc_calls_sorter, + nc_exec_fail_trace=nc_exec_fail_trace, + ) + self.block_algorithm_factory = BlockConsensusAlgorithmFactory( - settings, runner_factory, nc_log_storage, feature_service, nc_exec_fail_trace=nc_exec_fail_trace, + settings, self._block_executor, feature_service, ) self.transaction_algorithm_factory = TransactionConsensusAlgorithmFactory() self.nc_calls_sorter = nc_calls_sorter @@ -96,10 +119,10 @@ def __init__( def create_context(self) -> ConsensusAlgorithmContext: """Handy method to create a context that can be used to access block and transaction algorithms.""" - return ConsensusAlgorithmContext(self, self._pubsub) + return ConsensusAlgorithmContext(self) @cpu.profiler(key=lambda self, base: 'consensus!{}'.format(base.hash.hex())) - def unsafe_update(self, base: BaseTransaction) -> None: + def unsafe_update(self, base: BaseTransaction) -> list[ConsensusEvent]: """ Run a consensus update with its own context, indexes will be updated accordingly. @@ -107,8 +130,7 @@ def unsafe_update(self, base: BaseTransaction) -> None: if this method throws any exception. 
""" from hathor.transaction import Block, Transaction - assert base.storage is not None - assert base.storage.is_only_valid_allowed() + assert self.tx_storage.is_only_valid_allowed() meta = base.get_metadata() assert meta.validation.is_valid() @@ -120,13 +142,10 @@ def unsafe_update(self, base: BaseTransaction) -> None: # this context instance will live only while this update is running context = self.create_context() - assert base.storage is not None - storage = base.storage - assert storage.indexes is not None - best_height, best_tip = storage.indexes.height.get_height_tip() + best_height, best_tip = self.tx_storage.indexes.height.get_height_tip() # This has to be called before the removal of vertices, otherwise this call may fail. - old_best_block = base.storage.get_transaction(best_tip) + old_best_block = self.tx_storage.get_block(best_tip) if isinstance(base, Transaction): context.transaction_algorithm.update_consensus(base) @@ -138,11 +157,10 @@ def unsafe_update(self, base: BaseTransaction) -> None: # signal a mempool tips index update for all affected transactions, # because that index is used on _compute_vertices_that_became_invalid below. 
for tx_affected in _sorted_affected_txs(context.txs_affected): - if storage.indexes.mempool_tips is not None: - storage.indexes.mempool_tips.update(tx_affected) + self.tx_storage.indexes.mempool_tips.update(tx_affected) txs_to_remove: list[BaseTransaction] = [] - new_best_height, new_best_tip = storage.indexes.height.get_height_tip() + new_best_height, new_best_tip = self.tx_storage.indexes.height.get_height_tip() if context.reorg_info is not None: if new_best_height < best_height: @@ -152,20 +170,24 @@ def unsafe_update(self, base: BaseTransaction) -> None: ) # XXX: this method will mark as INVALID all transactions in the mempool that became invalid after the reorg - txs_to_remove.extend(self._compute_vertices_that_became_invalid(storage, new_best_height)) + txs_to_remove.extend( + self._compute_vertices_that_became_invalid(new_best_block=context.reorg_info.new_best_block) + ) if txs_to_remove: self.log.warn('some transactions on the mempool became invalid and will be removed', count=len(txs_to_remove)) # XXX: because transactions in `txs_to_remove` are marked as invalid, we need this context to be # able to remove them - with storage.allow_invalid_context(): - self._remove_transactions(txs_to_remove, storage, context) + with self.tx_storage.allow_invalid_context(): + self._remove_transactions(txs_to_remove, context) + + pubsub_events = [] # emit the reorg started event if needed if context.reorg_info is not None: assert isinstance(old_best_block, Block) - new_best_block = base.storage.get_transaction(new_best_tip) + new_best_block = self.tx_storage.get_transaction(new_best_tip) reorg_size = old_best_block.get_height() - context.reorg_info.common_block.get_height() # TODO: After we remove block ties, should the assert below be true? 
# assert old_best_block.get_metadata().voided_by @@ -178,26 +200,28 @@ def unsafe_update(self, base: BaseTransaction) -> None: new_best_block=new_best_block.hash_hex, common_block=context.reorg_info.common_block.hash_hex, ) - context.pubsub.publish( - HathorEvents.REORG_STARTED, - old_best_height=best_height, - old_best_block=old_best_block, - new_best_height=new_best_height, - new_best_block=new_best_block, - common_block=context.reorg_info.common_block, - reorg_size=reorg_size, - ) + pubsub_events.append(ConsensusEvent( + event=HathorEvents.REORG_STARTED, + kwargs=dict( + old_best_height=best_height, + old_best_block=old_best_block, + new_best_height=new_best_height, + new_best_block=new_best_block, + common_block=context.reorg_info.common_block, + reorg_size=reorg_size, + ) + )) # finally signal an index update for all affected transactions for tx_affected in _sorted_affected_txs(context.txs_affected): - assert tx_affected.storage is not None - assert tx_affected.storage.indexes is not None - tx_affected.storage.indexes.update(tx_affected) - context.pubsub.publish(HathorEvents.CONSENSUS_TX_UPDATE, tx=tx_affected) + self.tx_storage.indexes.update_critical_indexes(tx_affected) + with non_critical_code(self.log): + self.tx_storage.indexes.update_non_critical_indexes(tx_affected) + pubsub_events.append(ConsensusEvent(event=HathorEvents.CONSENSUS_TX_UPDATE, kwargs=dict(tx=tx_affected))) # signal all transactions of which the execution succeeded for tx_nc_success in context.nc_exec_success: - context.pubsub.publish(HathorEvents.NC_EXEC_SUCCESS, tx=tx_nc_success) + pubsub_events.append(ConsensusEvent(event=HathorEvents.NC_EXEC_SUCCESS, kwargs=dict(tx=tx_nc_success))) # handle custom NC events if isinstance(base, Block): @@ -205,17 +229,21 @@ def unsafe_update(self, base: BaseTransaction) -> None: for tx, events in context.nc_events: assert tx.is_nano_contract() for event in events: - context.pubsub.publish(HathorEvents.NC_EVENT, tx=tx, nc_event=event) + 
pubsub_events.append( + ConsensusEvent(event=HathorEvents.NC_EVENT, kwargs=dict(tx=tx, nc_event=event)) + ) else: assert context.nc_events is None # And emit events for txs that were removed for tx_removed in txs_to_remove: - context.pubsub.publish(HathorEvents.CONSENSUS_TX_REMOVED, tx=tx_removed) + pubsub_events.append(ConsensusEvent(event=HathorEvents.CONSENSUS_TX_REMOVED, kwargs=dict(tx=tx_removed))) # and also emit the reorg finished event if needed if context.reorg_info is not None: - context.pubsub.publish(HathorEvents.REORG_FINISHED) + pubsub_events.append(ConsensusEvent(event=HathorEvents.REORG_FINISHED, kwargs={})) + + return pubsub_events def filter_out_voided_by_entries_from_parents(self, tx: BaseTransaction, voided_by: set[bytes]) -> set[bytes]: """Filter out voided_by entries that should be inherited from parents.""" @@ -241,8 +269,7 @@ def _filter_out_soft_voided_entries(self, tx: BaseTransaction, voided_by: set[by continue if h in self.soft_voided_tx_ids: continue - assert tx.storage is not None - tx3 = tx.storage.get_transaction(h) + tx3 = self.tx_storage.get_transaction(h) tx3_meta = tx3.get_metadata() tx3_voided_by: set[bytes] = tx3_meta.voided_by or set() if not (self.soft_voided_tx_ids & tx3_voided_by): @@ -266,8 +293,7 @@ def _filter_out_nc_fail_entries(self, tx: BaseTransaction, voided_by: set[bytes] continue if h == tx.hash: continue - assert tx.storage is not None - tx2 = tx.storage.get_transaction(h) + tx2 = self.tx_storage.get_transaction(h) tx2_meta = tx2.get_metadata() tx2_voided_by: set[bytes] = tx2_meta.voided_by or set() if NC_EXECUTION_FAIL_ID in tx2_voided_by: @@ -275,12 +301,7 @@ def _filter_out_nc_fail_entries(self, tx: BaseTransaction, voided_by: set[bytes] assert NC_EXECUTION_FAIL_ID not in ret return ret - def _remove_transactions( - self, - txs: list[BaseTransaction], - storage: TransactionStorage, - context: ConsensusAlgorithmContext, - ) -> None: + def _remove_transactions(self, txs: list[BaseTransaction], context: 
ConsensusAlgorithmContext) -> None: """Will remove all the transactions on the list from the database. Special notes: @@ -318,68 +339,63 @@ def _remove_transactions( spent_tx_meta.spent_outputs[tx_input.index].remove(tx.hash) context.save(spent_tx) for parent_hash, children_to_remove in parents_to_update.items(): - parent_tx = storage.get_transaction(parent_hash) + parent_tx = self.tx_storage.get_transaction(parent_hash) for child in children_to_remove: - storage.vertex_children.remove_child(parent_tx, child) + self.tx_storage.vertex_children.remove_child(parent_tx, child) context.save(parent_tx) for tx in txs: self.log.debug('remove transaction', tx=tx.hash_hex) - storage.remove_transaction(tx) + self.tx_storage.remove_transaction(tx) - def _compute_vertices_that_became_invalid( - self, - storage: TransactionStorage, - new_best_height: int, - ) -> list[BaseTransaction]: + def _compute_vertices_that_became_invalid(self, *, new_best_block: Block) -> list[BaseTransaction]: """This method will look for transactions in the mempool that have become invalid after a reorg.""" from hathor.transaction.storage.traversal import BFSTimestampWalk from hathor.transaction.validation_state import ValidationState - assert storage.indexes is not None - assert storage.indexes.mempool_tips is not None - mempool_tips = list(storage.indexes.mempool_tips.iter(storage)) + mempool_tips = list(self.tx_storage.indexes.mempool_tips.iter(self.tx_storage)) if not mempool_tips: # Mempool is empty, nothing to remove. return [] mempool_rules: tuple[Callable[[Transaction], bool], ...] 
= ( - lambda tx: self._reward_lock_mempool_rule(tx, new_best_height), - lambda tx: self._unknown_contract_mempool_rule(tx), - lambda tx: self._nano_activation_rule(storage, tx), - self._checkdatasig_count_rule, + lambda tx: self._reward_lock_mempool_rule(tx, new_best_block.get_height()), + lambda tx: self._feature_activation_rules(tx, new_best_block), + self._unknown_contract_mempool_rule, ) - mempool_origin_bfs = BFSTimestampWalk( - storage, is_dag_funds=True, is_dag_verifications=True, is_left_to_right=False + find_invalid_bfs = BFSTimestampWalk( + self.tx_storage, is_dag_funds=True, is_dag_verifications=True, is_left_to_right=False ) invalid_txs: set[BaseTransaction] = set() # Run a right-to-left BFS starting from the mempool tips. - for tx in mempool_origin_bfs.run(mempool_tips, skip_root=False): + for tx in find_invalid_bfs.run(mempool_tips, skip_root=False): if not isinstance(tx, Transaction): - mempool_origin_bfs.skip_neighbors(tx) + find_invalid_bfs.skip_neighbors() continue - assert isinstance(tx, Transaction) if tx.get_metadata().first_block is not None: - mempool_origin_bfs.skip_neighbors(tx) + find_invalid_bfs.skip_neighbors() continue # At this point, it's a mempool tx, so we have to re-verify it. if not all(rule(tx) for rule in mempool_rules): invalid_txs.add(tx) + find_invalid_bfs.add_neighbors() # From the invalid txs, mark all vertices to the right as invalid. This includes both txs and blocks. 
- to_remove: set[BaseTransaction] = set() + to_remove: list[BaseTransaction] = [] find_to_remove_bfs = BFSTimestampWalk( - storage, is_dag_funds=True, is_dag_verifications=True, is_left_to_right=True + self.tx_storage, is_dag_funds=True, is_dag_verifications=True, is_left_to_right=True ) for vertex in find_to_remove_bfs.run(invalid_txs, skip_root=False): vertex.set_validation(ValidationState.INVALID) - to_remove.add(vertex) + to_remove.append(vertex) + find_to_remove_bfs.add_neighbors() - return sorted(to_remove, reverse=True, key=lambda tx: tx.timestamp) + to_remove.reverse() + return to_remove def _reward_lock_mempool_rule(self, tx: Transaction, new_best_height: int) -> bool: """ @@ -414,26 +430,67 @@ def _unknown_contract_mempool_rule(self, tx: Transaction) -> bool: return False return True - def _nano_activation_rule(self, storage: TransactionStorage, tx: Transaction) -> bool: + def _feature_activation_rules(self, tx: Transaction, new_best_block: Block) -> bool: + """Check whether a tx became invalid because of some feature state of the new best block.""" + features = self.feature_service.get_feature_states(vertex=new_best_block) + + for feature, feature_state in features.items(): + is_active = feature_state.is_active() + match feature: + case Feature.NANO_CONTRACTS: + if not self._nano_activation_rule(tx, is_active): + return False + case Feature.FEE_TOKENS: + if not self._fee_tokens_activation_rule(tx, is_active): + return False + case Feature.COUNT_CHECKDATASIG_OP: + if not self._checkdatasig_count_rule(tx): + return False + case Feature.OPCODES_V2: + if not self._opcodes_v2_activation_rule(tx, new_best_block): + return False + case ( + Feature.INCREASE_MAX_MERKLE_PATH_LENGTH + | Feature.NOP_FEATURE_1 + | Feature.NOP_FEATURE_2 + | Feature.NOP_FEATURE_3 + ): + # These features do not affect transactions. 
+ pass + case _: + assert_never(feature) + + return True + + def _nano_activation_rule(self, tx: Transaction, is_active: bool) -> bool: """Check whether a tx became invalid because the reorg changed the nano feature activation state.""" from hathor.nanocontracts import OnChainBlueprint - from hathor.nanocontracts.utils import is_nano_active - from hathor.transaction.token_creation_tx import TokenCreationTransaction - from hathor.transaction.token_info import TokenVersion - best_block = storage.get_best_block() - if is_nano_active(settings=self._settings, block=best_block, feature_service=self.feature_service): + if is_active: # When nano is active, this rule has no effect. return True - # The nano feature activation is actually used to enable 4 use cases: - + # The nano feature activation is actually used to enable 2 use cases: if tx.is_nano_contract(): return False if isinstance(tx, OnChainBlueprint): return False + return True + + def _fee_tokens_activation_rule(self, tx: Transaction, is_active: bool) -> bool: + """ + Check whether a tx became invalid because the reorg changed the fee-based tokens feature activation state. + """ + from hathor.transaction.token_creation_tx import TokenCreationTransaction + from hathor.transaction.token_info import TokenVersion + + if is_active: + # When fee-based tokens feature is active, this rule has no effect. 
+ return True + + # The fee-based tokens feature activation is actually used to enable 2 use cases: if isinstance(tx, TokenCreationTransaction) and tx.token_version == TokenVersion.FEE: return False @@ -443,17 +500,52 @@ def _nano_activation_rule(self, storage: TransactionStorage, tx: Transaction) -> return True def _checkdatasig_count_rule(self, tx: Transaction) -> bool: - """Check whether a tx became invalid because the reorg changed the checkdatasig feature activation state.""" + """Check whether a tx became invalid because of the count checkdatasig feature.""" from hathor.verification.vertex_verifier import VertexVerifier + # We check all txs regardless of the feature state, because this rule + # already prohibited mempool txs before the block feature activation. # Any exception in the sigops verification will be considered # a fail and the tx will be removed from the mempool. try: VertexVerifier._verify_sigops_output(settings=self._settings, vertex=tx, enable_checkdatasig_count=True) - except Exception: + except Exception as e: + if not isinstance(e, TooManySigOps): + self.log.exception('unexpected exception in mempool-reverification') return False return True + def _opcodes_v2_activation_rule(self, tx: Transaction, new_best_block: Block) -> bool: + """Check whether a tx became invalid because of the opcodes V2 feature.""" + from hathor.verification.nano_header_verifier import NanoHeaderVerifier + from hathor.verification.transaction_verifier import TransactionVerifier + + # We check all txs regardless of the feature state, because this rule + # already prohibited mempool txs before the block feature activation. + + params = VerificationParams.default_for_mempool(best_block=new_best_block) + + # Any exception in the inputs verification will be considered + # a fail and the tx will be removed from the mempool. 
+ try: + TransactionVerifier._verify_inputs(self._settings, tx, params, skip_script=False) + except Exception as e: + if not isinstance(e, InvalidInputData): + self.log.exception('unexpected exception in mempool-reverification') + return False + + # Any exception in the nc_signature verification will be considered + # a fail and the tx will be removed from the mempool. + if tx.is_nano_contract(): + try: + NanoHeaderVerifier._verify_nc_signature(self._settings, tx, params) + except Exception as e: + if not isinstance(e, NCInvalidSignature): + self.log.exception('unexpected exception in mempool-reverification') + return False + + return True + def _sorted_affected_txs(affected_txs: set[BaseTransaction]) -> list[BaseTransaction]: """ diff --git a/hathor/consensus/context.py b/hathor/consensus/context.py index ae9bb2f5d..83fb92e41 100644 --- a/hathor/consensus/context.py +++ b/hathor/consensus/context.py @@ -19,7 +19,6 @@ from structlog import get_logger -from hathor.pubsub import PubSubManager from hathor.transaction import BaseTransaction, Block, Transaction if TYPE_CHECKING: @@ -45,7 +44,6 @@ class ConsensusAlgorithmContext: """ __slots__ = ( 'consensus', - 'pubsub', 'block_algorithm', 'transaction_algorithm', 'txs_affected', @@ -55,7 +53,6 @@ class ConsensusAlgorithmContext: ) consensus: 'ConsensusAlgorithm' - pubsub: PubSubManager block_algorithm: 'BlockConsensusAlgorithm' transaction_algorithm: 'TransactionConsensusAlgorithm' txs_affected: set[BaseTransaction] @@ -63,9 +60,8 @@ class ConsensusAlgorithmContext: nc_events: list[tuple[Transaction, list[NCEvent]]] | None nc_exec_success: list[Transaction] - def __init__(self, consensus: 'ConsensusAlgorithm', pubsub: PubSubManager) -> None: + def __init__(self, consensus: 'ConsensusAlgorithm') -> None: self.consensus = consensus - self.pubsub = pubsub self.block_algorithm = self.consensus.block_algorithm_factory(self) self.transaction_algorithm = self.consensus.transaction_algorithm_factory(self) self.txs_affected = 
set() diff --git a/hathor/consensus/transaction_consensus.py b/hathor/consensus/transaction_consensus.py index 6187c256c..6fccf115e 100644 --- a/hathor/consensus/transaction_consensus.py +++ b/hathor/consensus/transaction_consensus.py @@ -17,6 +17,7 @@ from structlog import get_logger from hathor.conf.get_settings import get_global_settings +from hathor.execution_manager import non_critical_code from hathor.transaction import BaseTransaction, Block, Transaction, TxInput from hathor.types import VertexId from hathor.util import classproperty @@ -228,7 +229,9 @@ def update_voided_info(self, tx: Transaction) -> None: if voided_by: meta.voided_by = voided_by.copy() self.context.save(tx) - tx.storage.del_from_indexes(tx) + tx.storage.indexes.del_from_critical_indexes(tx) + with non_critical_code(self.log): + tx.storage.indexes.del_from_non_critical_indexes(tx) # Check conflicts of the transactions voiding us. for h in voided_by: @@ -391,7 +394,7 @@ def remove_voided_by(self, tx: Transaction, voided_hash: bytes) -> bool: meta2 = tx2.get_metadata() if not (meta2.voided_by and voided_hash in meta2.voided_by): - bfs.skip_neighbors(tx2) + bfs.skip_neighbors() continue if meta2.voided_by: meta2.voided_by.discard(voided_hash) @@ -399,9 +402,11 @@ def remove_voided_by(self, tx: Transaction, voided_hash: bytes) -> bool: check_list.append(tx2) if not meta2.voided_by: meta2.voided_by = None - tx.storage.add_to_indexes(tx2) + with non_critical_code(self.log): + tx.storage.indexes.add_to_non_critical_indexes(tx2) self.context.save(tx2) self.assert_valid_consensus(tx2) + bfs.add_neighbors() from hathor.transaction import Transaction for tx2 in check_list: @@ -502,8 +507,11 @@ def add_voided_by(self, tx: Transaction, voided_hash: bytes, *, is_dag_verificat # All voided transactions with conflicts must have their accumulated weight calculated. 
tx2.update_accumulated_weight(save_file=False) self.context.save(tx2) - tx2.storage.del_from_indexes(tx2, relax_assert=True) + tx2.storage.indexes.del_from_critical_indexes(tx2) + with non_critical_code(self.log): + tx2.storage.indexes.del_from_non_critical_indexes(tx2) self.assert_valid_consensus(tx2) + bfs.add_neighbors() for tx2 in check_list: self.check_conflicts(tx2) diff --git a/hathor/dag_builder/artifacts.py b/hathor/dag_builder/artifacts.py index c183f4358..afa930473 100644 --- a/hathor/dag_builder/artifacts.py +++ b/hathor/dag_builder/artifacts.py @@ -14,7 +14,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Iterator, NamedTuple, Sequence, TypeVar +from typing import TYPE_CHECKING, Callable, Iterator, NamedTuple, Sequence, TypeVar from hathor.dag_builder.types import DAGNode from hathor.manager import HathorManager @@ -42,6 +42,11 @@ def __init__(self, items: Iterator[tuple[DAGNode, BaseTransaction]]) -> None: self.list: tuple[_Pair, ...] = tuple(v) self._last_propagated: str | None = None + self._step_fns: list[Callable[[DAGNode, BaseTransaction], None]] = [] + + def register_step_fn(self, step_fn: Callable[[DAGNode, BaseTransaction], None]) -> None: + """Register a new step function to be called between vertex propagations.""" + self._step_fns.append(step_fn) def get_typed_vertex(self, name: str, type_: type[T]) -> T: """Get a vertex by name, asserting it is of the provided type.""" @@ -83,6 +88,8 @@ def propagate_with( assert manager.vertex_handler.on_new_relayed_vertex(vertex) except Exception as e: raise Exception(f'failed on_new_tx({node.name})') from e + for step_fn in self._step_fns: + step_fn(node, vertex) self._last_propagated = node.name if node.name == self._last_propagated: diff --git a/hathor/dag_builder/vertex_exporter.py b/hathor/dag_builder/vertex_exporter.py index b19b5a986..fb26f93fd 100644 --- a/hathor/dag_builder/vertex_exporter.py +++ b/hathor/dag_builder/vertex_exporter.py @@ -289,6 +289,7 @@ def 
create_vertex_block(self, node: DAGNode) -> Block: def _get_ast_value_bytes(self, ast_node: ast.AST) -> bytes: if isinstance(ast_node, ast.Constant): + assert isinstance(ast_node.value, str) return bytes.fromhex(ast_node.value) elif isinstance(ast_node, ast.Name): return self.get_vertex_id(ast_node.id) diff --git a/hathor/event/model/event_data.py b/hathor/event/model/event_data.py index 0d22ca402..65228fa9b 100644 --- a/hathor/event/model/event_data.py +++ b/hathor/event/model/event_data.py @@ -142,6 +142,7 @@ class TxDataWithoutMeta(BaseEventData, extra=Extra.ignore): token_symbol: Optional[str] aux_pow: Optional[str] = None headers: list[TxHeader] = [] + name: str | None @classmethod def from_event_arguments(cls, args: EventArguments) -> Self: @@ -178,6 +179,7 @@ def from_event_arguments(cls, args: EventArguments) -> Self: ) tx_json['headers'] = headers + tx_json['name'] = args.tx.name return cls(**tx_json) diff --git a/hathor/execution_manager.py b/hathor/execution_manager.py index e3336430f..154faa375 100644 --- a/hathor/execution_manager.py +++ b/hathor/execution_manager.py @@ -13,8 +13,10 @@ # limitations under the License. 
import sys -from typing import Callable, NoReturn +from contextlib import contextmanager +from typing import Callable, Iterator, NoReturn +import structlog from structlog import get_logger from hathor.reactor import ReactorProtocol @@ -63,3 +65,12 @@ def crash_and_exit(self, *, reason: str) -> NoReturn: self._reactor.stop() self._reactor.crash() sys.exit(-1) + + +@contextmanager +def non_critical_code(log: structlog.stdlib.BoundLogger) -> Iterator[None]: + """Use this context manager to ignore all exceptions in the contained code.""" + try: + yield + except BaseException: + log.exception('ignoring error in non-critical code') diff --git a/hathor/feature_activation/feature.py b/hathor/feature_activation/feature.py index 4e5671093..480ef5685 100644 --- a/hathor/feature_activation/feature.py +++ b/hathor/feature_activation/feature.py @@ -12,11 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. -from enum import Enum, unique +from enum import StrEnum, unique @unique -class Feature(str, Enum): +class Feature(StrEnum): """ An enum containing all features that participate in the feature activation process, past or future, activated or not, for all networks. Features should NOT be removed from this enum, to preserve history. Their values @@ -30,5 +30,6 @@ class Feature(str, Enum): INCREASE_MAX_MERKLE_PATH_LENGTH = 'INCREASE_MAX_MERKLE_PATH_LENGTH' COUNT_CHECKDATASIG_OP = 'COUNT_CHECKDATASIG_OP' - NANO_CONTRACTS = 'NANO_CONTRACTS' + FEE_TOKENS = 'FEE_TOKENS' + OPCODES_V2 = 'OPCODES_V2' diff --git a/hathor/feature_activation/utils.py b/hathor/feature_activation/utils.py new file mode 100644 index 000000000..774707bac --- /dev/null +++ b/hathor/feature_activation/utils.py @@ -0,0 +1,77 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, assert_never
+
+from hathor.feature_activation.feature import Feature
+from hathor.feature_activation.model.feature_state import FeatureState
+from hathor.transaction.scripts.opcode import OpcodesVersion
+
+if TYPE_CHECKING:
+    from hathor.conf.settings import FeatureSetting, HathorSettings
+    from hathor.feature_activation.feature_service import FeatureService
+    from hathor.transaction import Vertex
+
+
+@dataclass(slots=True, frozen=True, kw_only=True)
+class Features:
+    """A dataclass holding state information about features from the Feature Activation process."""
+
+    count_checkdatasig_op: bool
+    nanocontracts: bool
+    fee_tokens: bool
+    opcodes_version: OpcodesVersion
+
+    @staticmethod
+    def from_vertex(*, settings: HathorSettings, feature_service: FeatureService, vertex: Vertex) -> Features:
+        """Build a Features instance from the feature states of the provided vertex and the settings."""
+        from hathor.conf.settings import FeatureSetting
+        feature_states = feature_service.get_feature_states(vertex=vertex)
+        feature_settings = {
+            Feature.COUNT_CHECKDATASIG_OP: FeatureSetting.FEATURE_ACTIVATION,
+            Feature.NANO_CONTRACTS: settings.ENABLE_NANO_CONTRACTS,
+            Feature.FEE_TOKENS: settings.ENABLE_FEE_BASED_TOKENS,
+            Feature.OPCODES_V2: settings.ENABLE_OPCODES_V2,
+        }
+
+        feature_is_active: dict[Feature, bool] = {
+            feature: _is_feature_active(setting, feature_states.get(feature, FeatureState.DEFINED))
+            for feature, setting in 
feature_settings.items() + } + + opcodes_version = OpcodesVersion.V2 if feature_is_active[Feature.OPCODES_V2] else OpcodesVersion.V1 + + return Features( + count_checkdatasig_op=feature_is_active[Feature.COUNT_CHECKDATASIG_OP], + nanocontracts=feature_is_active[Feature.NANO_CONTRACTS], + fee_tokens=feature_is_active[Feature.FEE_TOKENS], + opcodes_version=opcodes_version, + ) + + +def _is_feature_active(setting: FeatureSetting, state: FeatureState) -> bool: + """Return whether a feature is active based on the setting and state.""" + from hathor.conf.settings import FeatureSetting + match setting: + case FeatureSetting.DISABLED: + return False + case FeatureSetting.ENABLED: + return True + case FeatureSetting.FEATURE_ACTIVATION: + return state.is_active() + case _: # pragma: no cover + assert_never(setting) diff --git a/hathor/indexes/manager.py b/hathor/indexes/manager.py index 6f5e95971..3b55c836e 100644 --- a/hathor/indexes/manager.py +++ b/hathor/indexes/manager.py @@ -31,6 +31,7 @@ from hathor.indexes.mempool_tips_index import MempoolTipsIndex from hathor.indexes.nc_creation_index import NCCreationIndex from hathor.indexes.nc_history_index import NCHistoryIndex +from hathor.indexes.scope import Scope from hathor.indexes.timestamp_index import ScopeType as TimestampScopeType, TimestampIndex from hathor.indexes.tokens_index import TokensIndex from hathor.indexes.utxo_index import UtxoIndex @@ -65,7 +66,7 @@ class IndexesManager(ABC): sorted_txs: TimestampIndex height: HeightIndex - mempool_tips: Optional[MempoolTipsIndex] + mempool_tips: MempoolTipsIndex addresses: Optional[AddressIndex] tokens: Optional[TokensIndex] utxo: Optional[UtxoIndex] @@ -119,22 +120,11 @@ def enable_utxo_index(self) -> None: """Enable UTXO index. It does nothing if it has already been enabled.""" raise NotImplementedError - @abstractmethod - def enable_mempool_index(self) -> None: - """Enable mempool index. 
It does nothing if it has already been enabled.""" - raise NotImplementedError - @abstractmethod def enable_nc_indexes(self) -> None: """Enable Nano Contract related indexes.""" raise NotImplementedError - def force_clear_all(self) -> None: - """ Force clear all indexes. - """ - for index in self.iter_all_indexes(): - index.force_clear() - def _manually_initialize(self, tx_storage: 'TransactionStorage') -> None: """ Initialize the indexes, checking the indexes that need initialization, and the optimal iterator to use. """ @@ -153,7 +143,8 @@ def _manually_initialize(self, tx_storage: 'TransactionStorage') -> None: indexes_to_init.append(index) if indexes_to_init: - self.log.info('there are indexes that need initialization', indexes_to_init=indexes_to_init) + indexes_names = [type(index).__name__ for index in indexes_to_init] + self.log.info('there are indexes that need initialization', indexes_to_init=indexes_names) else: self.log.info('there are no indexes that need initialization') @@ -167,24 +158,29 @@ def _manually_initialize(self, tx_storage: 'TransactionStorage') -> None: cache_capacity = None # Reduce cache size during initialization. 
- from hathor.transaction.storage import TransactionCacheStorage - if isinstance(tx_storage, TransactionCacheStorage): - cache_capacity = tx_storage.capacity - tx_storage.set_capacity(min(MAX_CACHE_SIZE_DURING_LOAD, cache_capacity)) + if cache_data := tx_storage.get_cache_data(): + cache_capacity = cache_data.capacity + tx_storage.set_cache_capacity(min(MAX_CACHE_SIZE_DURING_LOAD, cache_capacity)) self.log.debug('indexes pre-init') for index in self.iter_all_indexes(): index.init_start(self) if indexes_to_init: - overall_scope = reduce(operator.__or__, map(lambda i: i.get_scope(), indexes_to_init)) + overall_scope: Scope = reduce(operator.__or__, map(lambda i: i.get_scope(), indexes_to_init)) tx_iter_inner = overall_scope.get_iterator(tx_storage) - tx_iter = tx_progress(tx_iter_inner, log=self.log, total=tx_storage.get_vertices_count()) + tx_iter = tx_progress( + tx_iter_inner, + log=self.log, + total=tx_storage.get_vertices_count(), + show_height_and_ts=overall_scope.topological_order, + ) self.log.debug('indexes init', scope=overall_scope) else: tx_iter = iter([]) self.log.debug('indexes init') + self.log.info('initializing indexes...') for tx in tx_iter: # feed each transaction to the indexes that they are interested in for index in indexes_to_init: @@ -192,19 +188,19 @@ def _manually_initialize(self, tx_storage: 'TransactionStorage') -> None: index.init_loop_step(tx) # Restore cache capacity. 
- if isinstance(tx_storage, TransactionCacheStorage): - assert cache_capacity is not None - tx_storage.set_capacity(cache_capacity) + assert cache_capacity is not None + tx_storage.set_cache_capacity(cache_capacity) - def update(self, tx: BaseTransaction) -> None: + def update_critical_indexes(self, tx: BaseTransaction) -> None: """ This is the new update method that indexes should use instead of add_tx/del_tx """ - if self.mempool_tips: - self.mempool_tips.update(tx) + self.mempool_tips.update(tx) + + def update_non_critical_indexes(self, tx: BaseTransaction) -> None: if self.utxo: self.utxo.update(tx) - def handle_contract_execution(self, tx: BaseTransaction) -> None: + def non_critical_handle_contract_execution(self, tx: BaseTransaction) -> None: """ Update indexes according to a Nano Contract execution. Must be called only once for each time a contract is executed. @@ -276,7 +272,7 @@ def handle_contract_execution(self, tx: BaseTransaction) -> None: case _: assert_never(record) - def handle_contract_unexecution(self, tx: BaseTransaction) -> None: + def non_critical_handle_contract_unexecution(self, tx: BaseTransaction) -> None: """ Update indexes according to a Nano Contract unexecution, which happens when a reorg unconfirms a nano tx. Must be called only once for each time a contract is unexecuted. 
@@ -342,7 +338,7 @@ def handle_contract_unexecution(self, tx: BaseTransaction) -> None: case _: assert_never(record) - def add_tx(self, tx: BaseTransaction) -> bool: + def add_to_non_critical_indexes(self, tx: BaseTransaction) -> bool: """ Add a transaction to the indexes :param tx: Transaction to be added @@ -380,7 +376,14 @@ def add_tx(self, tx: BaseTransaction) -> bool: return r2 - def del_tx(self, tx: BaseTransaction, *, remove_all: bool = False, relax_assert: bool = False) -> None: + def del_from_critical_indexes(self, tx: BaseTransaction) -> None: + assert tx.storage is not None + # mempool will pick-up if the transaction is voided/invalid and remove it + if tx.storage.transaction_exists(tx.hash): + logger.debug('remove from mempool tips', tx=tx.hash_hex) + self.mempool_tips.update(tx, force_remove=True) + + def del_from_non_critical_indexes(self, tx: BaseTransaction, *, remove_all: bool = False) -> None: """ Delete a transaction from the indexes :param tx: Transaction to be deleted @@ -406,11 +409,6 @@ def del_tx(self, tx: BaseTransaction, *, remove_all: bool = False, relax_assert: self.blueprint_history.remove_tx(tx) self.info.update_counts(tx, remove=True) - # mempool will pick-up if the transaction is voided/invalid and remove it - if self.mempool_tips is not None and tx.storage.transaction_exists(tx.hash): - logger.debug('remove from mempool tips', tx=tx.hash_hex) - self.mempool_tips.update(tx, force_remove=True) - if tx.is_block: self.sorted_blocks.del_tx(tx) else: @@ -422,6 +420,7 @@ def del_tx(self, tx: BaseTransaction, *, remove_all: bool = False, relax_assert: class RocksDBIndexesManager(IndexesManager): def __init__(self, rocksdb_storage: 'RocksDBStorage', *, settings: HathorSettings) -> None: + from hathor.indexes.memory_mempool_tips_index import MemoryMempoolTipsIndex from hathor.indexes.rocksdb_height_index import RocksDBHeightIndex from hathor.indexes.rocksdb_info_index import RocksDBInfoIndex from hathor.indexes.rocksdb_timestamp_index import 
RocksDBTimestampIndex @@ -431,6 +430,8 @@ def __init__(self, rocksdb_storage: 'RocksDBStorage', *, settings: HathorSetting self.info = RocksDBInfoIndex(self._db, settings=settings) self.height = RocksDBHeightIndex(self._db, settings=settings) + # XXX: use of RocksDBMempoolTipsIndex is very slow and was suspended + self.mempool_tips = MemoryMempoolTipsIndex(settings=self.settings) self.sorted_all = RocksDBTimestampIndex(self._db, scope_type=TimestampScopeType.ALL, settings=settings) self.sorted_blocks = RocksDBTimestampIndex(self._db, scope_type=TimestampScopeType.BLOCKS, settings=settings) @@ -439,7 +440,6 @@ def __init__(self, rocksdb_storage: 'RocksDBStorage', *, settings: HathorSetting self.addresses = None self.tokens = None self.utxo = None - self.mempool_tips = None self.nc_creation = None self.nc_history = None self.blueprints = None @@ -463,12 +463,6 @@ def enable_utxo_index(self) -> None: if self.utxo is None: self.utxo = RocksDBUtxoIndex(self._db, settings=self.settings) - def enable_mempool_index(self) -> None: - from hathor.indexes.memory_mempool_tips_index import MemoryMempoolTipsIndex - if self.mempool_tips is None: - # XXX: use of RocksDBMempoolTipsIndex is very slow and was suspended - self.mempool_tips = MemoryMempoolTipsIndex(settings=self.settings) - def enable_nc_indexes(self) -> None: from hathor.indexes.blueprint_timestamp_index import BlueprintTimestampIndex from hathor.indexes.rocksdb_blueprint_history_index import RocksDBBlueprintHistoryIndex diff --git a/hathor/indexes/mempool_tips_index.py b/hathor/indexes/mempool_tips_index.py index fb3b8e5b2..1d605e4c2 100644 --- a/hathor/indexes/mempool_tips_index.py +++ b/hathor/indexes/mempool_tips_index.py @@ -202,12 +202,13 @@ def iter_all(self, tx_storage: 'TransactionStorage') -> Iterator[Transaction]: bfs = BFSTimestampWalk(tx_storage, is_dag_verifications=True, is_dag_funds=True, is_left_to_right=False) for tx in bfs.run(self.iter(tx_storage), skip_root=False): if not isinstance(tx, 
Transaction): - bfs.skip_neighbors(tx) + bfs.skip_neighbors() continue if tx.get_metadata().first_block is not None: - bfs.skip_neighbors(tx) + bfs.skip_neighbors() else: yield tx + bfs.add_neighbors() def get(self) -> set[bytes]: return set(iter(self._index)) diff --git a/hathor/indexes/nc_history_index.py b/hathor/indexes/nc_history_index.py index 6099ccaa1..4c4d76b26 100644 --- a/hathor/indexes/nc_history_index.py +++ b/hathor/indexes/nc_history_index.py @@ -70,6 +70,11 @@ def get_newest(self, contract_id: bytes) -> Iterable[bytes]: """ return self._get_sorted_from_key(contract_id, reverse=True) + def get_oldest(self, contract_id: bytes) -> Iterable[bytes]: + """Get a list of tx_ids sorted by timestamp for a given contract_id starting from the oldest. + """ + return self._get_sorted_from_key(contract_id, reverse=False) + def get_older(self, contract_id: bytes, tx_start: Optional[BaseTransaction] = None) -> Iterable[bytes]: """Get a list of tx_ids sorted by timestamp for a given contract_id that are older than tx_start. """ diff --git a/hathor/indexes/rocksdb_timestamp_index.py b/hathor/indexes/rocksdb_timestamp_index.py index eb8927d60..22d3f2a7a 100644 --- a/hathor/indexes/rocksdb_timestamp_index.py +++ b/hathor/indexes/rocksdb_timestamp_index.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import TYPE_CHECKING, Iterator, Optional +from typing import TYPE_CHECKING, Any, Iterator, Optional from structlog import get_logger @@ -93,7 +93,7 @@ def _iter(self, from_timestamp: Optional[int] = None, from_tx: Optional[bytes] = """ if from_timestamp is None and from_tx is not None: raise ValueError('from_tx needs from_timestamp, but it is None') - it = self._db.iterkeys(self._cf) + it: Any = self._db.iterkeys(self._cf) if reverse: it = reversed(it) if from_timestamp is None: diff --git a/hathor/indexes/rocksdb_tokens_index.py b/hathor/indexes/rocksdb_tokens_index.py index 3cb3935ff..f00b1f757 100644 --- a/hathor/indexes/rocksdb_tokens_index.py +++ b/hathor/indexes/rocksdb_tokens_index.py @@ -14,7 +14,7 @@ from dataclasses import asdict, dataclass from enum import Enum -from typing import TYPE_CHECKING, Iterator, NamedTuple, Optional, cast +from typing import TYPE_CHECKING, Any, Iterator, NamedTuple, Optional, cast from structlog import get_logger from typing_extensions import assert_never, override @@ -542,7 +542,7 @@ def _iter_transactions(self, token_uid: bytes, from_tx: Optional[_TxIndex] = Non *, reverse: bool = False) -> Iterator[bytes]: """ Iterate over all transactions of a token, by default from oldest to newest. """ - it = self._db.iterkeys(self._cf) + it: Any = self._db.iterkeys(self._cf) seek_key = self._to_key_txs(token_uid, from_tx) self.log.debug('seek to', token_uid=token_uid.hex(), key=seek_key.hex()) if reverse: diff --git a/hathor/indexes/rocksdb_tx_group_index.py b/hathor/indexes/rocksdb_tx_group_index.py index 611e8c75e..7f8dbd806 100644 --- a/hathor/indexes/rocksdb_tx_group_index.py +++ b/hathor/indexes/rocksdb_tx_group_index.py @@ -13,7 +13,7 @@ # limitations under the License. 
from abc import abstractmethod -from typing import Callable, Iterator, Optional, Sized, TypeVar +from typing import Any, Callable, Generic, Iterator, Optional, Sized, TypeVar import rocksdb from structlog import get_logger @@ -31,7 +31,7 @@ GROUP_COUNT_VALUE_SIZE = 4 # in bytes -class _RocksDBTxGroupStatsIndex(RocksDBIndexUtils): +class _RocksDBTxGroupStatsIndex(RocksDBIndexUtils, Generic[KT]): def __init__( self, db: rocksdb.DB, @@ -157,7 +157,7 @@ def _get_sorted_from_key( reverse: bool = False ) -> Iterator[bytes]: self.log.debug('seek to', key=key) - it = self._db.iterkeys(self._cf) + it: Any = self._db.iterkeys(self._cf) if reverse: it = reversed(it) # when reversed we increment the key by 1, which effectively goes to the end of a prefix @@ -191,7 +191,7 @@ def _is_key_empty(self, key: KT) -> bool: @override def get_latest_tx_timestamp(self, key: KT) -> int | None: - it = self._db.iterkeys(self._cf) + it: Any = self._db.iterkeys(self._cf) it = reversed(it) # when reversed we increment the key by 1, which effectively goes to the end of a prefix it.seek_for_prev(incr_key(self._to_rocksdb_key(key))) diff --git a/hathor/indexes/rocksdb_vertex_timestamp_index.py b/hathor/indexes/rocksdb_vertex_timestamp_index.py index 6fae6bf9b..e22a63a6f 100644 --- a/hathor/indexes/rocksdb_vertex_timestamp_index.py +++ b/hathor/indexes/rocksdb_vertex_timestamp_index.py @@ -14,7 +14,7 @@ import struct from abc import ABC -from typing import Iterator, final +from typing import Any, Iterator, final import rocksdb from structlog import get_logger @@ -99,7 +99,7 @@ def _iter_sorted( reverse: bool, inclusive: bool = False, ) -> Iterator[bytes]: - it = self._db.iterkeys(self._cf) + it: Any = self._db.iterkeys(self._cf) if reverse: it = reversed(it) if tx_start is None: diff --git a/hathor/indexes/scope.py b/hathor/indexes/scope.py index 0a1e84c35..e0f0293fd 100644 --- a/hathor/indexes/scope.py +++ b/hathor/indexes/scope.py @@ -61,31 +61,22 @@ def matches(self, tx: BaseTransaction) -> 
bool: return False if not tx_meta.validation.is_fully_connected() and not self.include_partial: return False - # XXX: self.topologial_order doesn't affect self.match() + # XXX: self.topological_order doesn't affect self.match() # passed all checks return True def get_iterator(self, tx_storage: 'TransactionStorage') -> Iterator[BaseTransaction]: - """ This method returns an iterator that only yields transaction that match the current scope. """ - iterator: Iterator[BaseTransaction] - # XXX: this is to mark if the chosen iterator will yield partial transactions - iterator_covers_partial: bool - if self.topological_order: - iterator = tx_storage.topological_iterator() - iterator_covers_partial = False - else: - iterator = tx_storage.get_all_transactions() - iterator_covers_partial = True - for tx in iterator: - if self.matches(tx): - yield tx - if self.include_partial and not iterator_covers_partial: - # if partial transactions are needed and were not already covered, we use get_all_transactions, which - # includes partial transactions, to yield them, skipping all that aren't partial + This method returns an iterator that yields alls transactions in respect to this Scope's ordering only, + disregarding whether the tx matches the Scope or not. It's the caller's responsibility to match them. 
+ """ + if not self.topological_order: + yield from tx_storage.get_all_transactions() + return + + yield from tx_storage.topological_iterator() + if self.include_partial: for tx in tx_storage.get_all_transactions(): tx_meta = tx.get_metadata() - if tx_meta.validation.is_fully_connected(): - continue - if self.matches(tx): + if not tx_meta.validation.is_fully_connected(): yield tx diff --git a/hathor/manager.py b/hathor/manager.py index e48249c2d..99c5c18cc 100644 --- a/hathor/manager.py +++ b/hathor/manager.py @@ -19,7 +19,6 @@ from enum import Enum from typing import TYPE_CHECKING, Iterator, Optional, Union -from hathorlib.base_transaction import tx_or_block_from_bytes as lib_tx_or_block_from_bytes from structlog import get_logger from twisted.internet import defer from twisted.internet.defer import Deferred @@ -52,7 +51,7 @@ from hathor.p2p.manager import ConnectionsManager from hathor.p2p.peer import PrivatePeer from hathor.p2p.peer_id import PeerId -from hathor.pubsub import HathorEvents, PubSubManager +from hathor.pubsub import EventArguments, HathorEvents, PubSubManager from hathor.reactor import ReactorProtocol as Reactor from hathor.reward_lock import is_spent_reward_locked from hathor.stratum import StratumFactory @@ -68,6 +67,7 @@ from hathor.verification.verification_service import VerificationService from hathor.vertex_handler import VertexHandler from hathor.wallet import BaseWallet +from hathorlib.base_transaction import tx_or_block_from_bytes as lib_tx_or_block_from_bytes if TYPE_CHECKING: from hathor.websocket.factory import HathorAdminWebsocketFactory @@ -222,6 +222,7 @@ def __init__( if self.wallet: self.wallet.pubsub = self.pubsub self.wallet.reactor = self.reactor + self._subscribe_wallet(self.wallet) # It will be inject later by the builder. # XXX Remove this attribute after all dependencies are cleared. 
@@ -251,6 +252,14 @@ def __init__( self.lc_check_sync_state.clock = self.reactor self.lc_check_sync_state_interval = self.CHECK_SYNC_STATE_INTERVAL + def _subscribe_wallet(self, wallet: BaseWallet) -> None: + """Register a wallet on pubsub.""" + def handler(event: HathorEvents, args: EventArguments) -> None: + assert event == HathorEvents.NETWORK_NEW_TX_PROCESSING + wallet.on_new_tx(args.tx) + + self.pubsub.subscribe(HathorEvents.NETWORK_NEW_TX_PROCESSING, handler) + def get_default_capabilities(self) -> list[str]: """Return the default capabilities for this manager.""" default_capabilities = [ @@ -401,7 +410,9 @@ def get_nc_runner(self, block: Block) -> Runner: """Return a contract runner for a given block.""" nc_storage_factory = self.consensus_algorithm.nc_storage_factory block_storage = nc_storage_factory.get_block_storage_from_block(block) - return self.runner_factory.create(block_storage=block_storage) + return self.runner_factory.create( + block_storage=block_storage, + ) def get_best_block_nc_runner(self) -> Runner: """Return a contract runner for the best block.""" @@ -437,8 +448,6 @@ def _initialize_components(self) -> None: self.wallet._manually_initialize() self.tx_storage.pre_init() - assert self.tx_storage.indexes is not None - self._bit_signaling_service.start() started_at = int(time.time()) @@ -515,7 +524,6 @@ def _verify_checkpoints(self) -> None: This method needs the essential indexes to be already initialized. 
""" - assert self.tx_storage.indexes is not None # based on the current best-height, filter-out checkpoints that aren't expected to exist in the database best_height = self.tx_storage.get_height_best_block() expected_checkpoints = [cp for cp in self.checkpoints if cp.height <= best_height] diff --git a/hathor/metrics.py b/hathor/metrics.py index 35b3f9cae..ef5444707 100644 --- a/hathor/metrics.py +++ b/hathor/metrics.py @@ -26,7 +26,6 @@ from hathor.transaction.base_transaction import sum_weights from hathor.transaction.block import Block from hathor.transaction.storage import TransactionRocksDBStorage, TransactionStorage -from hathor.transaction.storage.cache_storage import TransactionCacheStorage if TYPE_CHECKING: from hathor.stratum import StratumFactory # noqa: F401 @@ -149,9 +148,9 @@ def _start_initial_values(self) -> None: self.hash_rate = self.calculate_new_hashrate(last_block[0]) self.best_block_height = self.tx_storage.get_height_best_block() - if isinstance(self.tx_storage, TransactionCacheStorage): - self.log.info("Transaction cache hits during initialization", hits=self.tx_storage.stats.get("hit")) - self.log.info("Transaction cache misses during initialization", misses=self.tx_storage.stats.get("miss")) + if cache_data := self.tx_storage.get_cache_data(): + self.log.info("Transaction cache hits during initialization", hits=cache_data.hit) + self.log.info("Transaction cache misses during initialization", misses=cache_data.miss) def start(self) -> None: self._start_initial_values() @@ -276,20 +275,13 @@ def collect_peer_connection_metrics(self) -> None: def set_cache_data(self) -> None: """ Collect and set data related to the transactions cache. 
""" - if isinstance(self.tx_storage, TransactionCacheStorage): - hits = self.tx_storage.stats.get("hit") - misses = self.tx_storage.stats.get("miss") - if hits: - self.transaction_cache_hits = hits - if misses: - self.transaction_cache_misses = misses + if cache_data := self.tx_storage.get_cache_data(): + self.transaction_cache_hits = cache_data.hit + self.transaction_cache_misses = cache_data.miss def set_tx_storage_data(self) -> None: store = self.tx_storage - if isinstance(self.tx_storage, TransactionCacheStorage): - store = self.tx_storage.store - if not isinstance(store, TransactionRocksDBStorage): # We currently only collect metrics for RocksDB return diff --git a/hathor/nanocontracts/allowed_imports.py b/hathor/nanocontracts/allowed_imports.py index 835c528da..31af914b4 100644 --- a/hathor/nanocontracts/allowed_imports.py +++ b/hathor/nanocontracts/allowed_imports.py @@ -64,5 +64,6 @@ NCParsedArgs=hathor.NCParsedArgs, sha3=hathor.sha3, verify_ecdsa=hathor.verify_ecdsa, + json_dumps=hathor.json_dumps, ), } diff --git a/hathor/nanocontracts/custom_builtins.py b/hathor/nanocontracts/custom_builtins.py index 81ff5add9..bc616c9e8 100644 --- a/hathor/nanocontracts/custom_builtins.py +++ b/hathor/nanocontracts/custom_builtins.py @@ -228,12 +228,13 @@ def __import__( name: str, globals: Mapping[str, object] | None = None, locals: Mapping[str, object] | None = None, - fromlist: Sequence[str] = (), + fromlist: Sequence[str] | None = None, level: int = 0, ) -> types.ModuleType: + fromlist_: Sequence[str] = fromlist or () if level != 0: raise ImportError('Relative imports are not allowed') - if not fromlist and name != 'typing': + if not fromlist_ and name != 'typing': # XXX: typing is allowed here because Foo[T] triggers a __import__('typing', fromlist=None) for some reason raise ImportError('Only `from ... 
import ...` imports are allowed') if name not in allowed_imports: @@ -241,12 +242,12 @@ def __import__( # Create a fake module class that will only be returned by this import call class FakeModule: - __slots__ = tuple(fromlist) + __slots__ = tuple(fromlist_) fake_module = FakeModule() allowed_fromlist = allowed_imports[name] - for import_what in fromlist: + for import_what in fromlist_: if import_what not in allowed_fromlist: raise ImportError(f'Import from "{name}.{import_what}" is not allowed.') diff --git a/hathor/nanocontracts/execution/__init__.py b/hathor/nanocontracts/execution/__init__.py new file mode 100644 index 000000000..8599bce1f --- /dev/null +++ b/hathor/nanocontracts/execution/__init__.py @@ -0,0 +1,21 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Nano contract block execution module.""" + +from hathor.nanocontracts.execution.block_executor import NCBlockExecutor + +__all__ = [ + 'NCBlockExecutor', +] diff --git a/hathor/nanocontracts/execution/block_executor.py b/hathor/nanocontracts/execution/block_executor.py new file mode 100644 index 000000000..ee8d24c97 --- /dev/null +++ b/hathor/nanocontracts/execution/block_executor.py @@ -0,0 +1,408 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""NCBlockExecutor - Executes nano contract transactions in a block.""" + +from __future__ import annotations + +import hashlib +import traceback +from dataclasses import dataclass +from typing import TYPE_CHECKING, Any, Callable + +from structlog import get_logger +from typing_extensions import assert_never + +from hathor.execution_manager import non_critical_code +from hathor.nanocontracts.exception import NCFail +from hathor.transaction import Block, Transaction +from hathor.transaction.exceptions import TokenNotFound +from hathor.transaction.nc_execution_state import NCExecutionState +from hathor.transaction.types import MetaNCCallRecord + +if TYPE_CHECKING: + from hathor.conf.settings import HathorSettings + from hathor.consensus.context import ConsensusAlgorithmContext + from hathor.nanocontracts.nc_exec_logs import NCLogStorage + from hathor.nanocontracts.runner import Runner + from hathor.nanocontracts.runner.runner import RunnerFactory + from hathor.nanocontracts.sorter.types import NCSorterCallable + from hathor.nanocontracts.storage import NCBlockStorage, NCStorageFactory + + +@dataclass(slots=True, frozen=True) +class NCExecutionSuccess: + """Result type for successful NC execution.""" + runner: 'Runner' + + +@dataclass(slots=True, frozen=True) +class NCExecutionFailure: + """Result type for failed NC execution.""" + runner: 'Runner' + exception: 'NCFail' + traceback: str + + +@dataclass(slots=True, frozen=True) +class NCExecutionSkipped: + """Result type for skipped NC execution (voided transactions).""" + seqnum_update: tuple[bytes, int] | 
None # (nc_address, new_seqnum) or None + + +NCExecutionResult = NCExecutionSuccess | NCExecutionFailure | NCExecutionSkipped + +logger = get_logger() + +_base_transaction_log = logger.new() + + +class NCBlockExecutor: + """ + Executes all nano contract transactions in a block. + + This class contains the core NC execution logic, extracted from + BlockConsensusAlgorithm to allow reuse by debugging tools. + """ + + def __init__( + self, + *, + settings: 'HathorSettings', + runner_factory: 'RunnerFactory', + nc_storage_factory: 'NCStorageFactory', + nc_log_storage: 'NCLogStorage', + nc_calls_sorter: 'NCSorterCallable', + nc_exec_fail_trace: bool = False, + ) -> None: + """ + Initialize the block executor. + + Args: + settings: Hathor settings. + runner_factory: Factory to create Runner instances. + nc_storage_factory: Factory to create NC storage instances. + nc_log_storage: Storage for NC execution logs. + nc_calls_sorter: Function to sort NC transactions for deterministic execution order. + nc_exec_fail_trace: Whether to include stack traces in failure logs. 
+ """ + self._settings = settings + self._runner_factory = runner_factory + self._nc_storage_factory = nc_storage_factory + self._nc_log_storage = nc_log_storage + self._nc_calls_sorter = nc_calls_sorter + self._nc_exec_fail_trace = nc_exec_fail_trace + + @property + def log(self) -> Any: + return _base_transaction_log + + def initialize_empty(self, block: Block, context: 'ConsensusAlgorithmContext') -> None: + """Initialize a block with an empty contract trie.""" + meta = block.get_metadata() + block_storage = self._nc_storage_factory.get_empty_block_storage() + block_storage.commit() + if meta.nc_block_root_id is not None: + assert meta.nc_block_root_id == block_storage.get_root_id() + else: + meta.nc_block_root_id = block_storage.get_root_id() + context.save(block) + + def execute_chain( + self, + block: Block, + context: 'ConsensusAlgorithmContext', + *, + on_failure: Callable[[Transaction], None], + ) -> None: + """Execute NC transactions for a block and any pending parent blocks, handling reorgs. + + This method determines which blocks need execution (handling reorgs) and + executes them in order from oldest to newest.""" + # If we reach this point, Nano Contracts must be enabled. + assert self._settings.ENABLE_NANO_CONTRACTS + assert not block.is_genesis + + meta = block.get_metadata() + if meta.voided_by: + # If the block is voided, skip execution. + return + + assert meta.nc_block_root_id is None + + to_be_executed: list[Block] = [] + is_reorg: bool = False + if context.reorg_info: + # handle reorgs + is_reorg = True + cur = block + # XXX We could stop when `cur_meta.nc_block_root_id is not None` but + # first we need to refactor meta.first_block and meta.voided_by to + # have different values per block. + while cur != context.reorg_info.common_block: + cur_meta = cur.get_metadata() + if cur_meta.nc_block_root_id is not None: + # Reset nc_block_root_id to force re-execution. 
+ cur_meta.nc_block_root_id = None + to_be_executed.append(cur) + cur = cur.get_block_parent() + else: + # No reorg occurred, so we execute all unexecuted blocks. + # Normally it's just the current block, but it's possible to have + # voided and therefore unexecuted blocks connected to the best chain, + # for example when a block is voided by a transaction. + cur = block + while True: + cur_meta = cur.get_metadata() + if cur_meta.nc_block_root_id is not None: + break + to_be_executed.append(cur) + if cur.is_genesis: + break + cur = cur.get_block_parent() + + for current in to_be_executed[::-1]: + self.execute_block(current, context, is_reorg=is_reorg, on_failure=on_failure) + + def execute_block( + self, + block: Block, + context: 'ConsensusAlgorithmContext', + *, + is_reorg: bool, + on_failure: Callable[[Transaction], None], + ) -> None: + """Execute all NC transactions in a single block.""" + from hathor.nanocontracts import NC_EXECUTION_FAIL_ID + + assert self._settings.ENABLE_NANO_CONTRACTS + + if block.is_genesis: + # XXX We can remove this call after the full node initialization is refactored and + # the genesis block goes through the consensus protocol. + self.initialize_empty(block, context) + return + + meta = block.get_metadata() + assert not meta.voided_by + assert meta.nc_block_root_id is None + + parent = block.get_block_parent() + parent_meta = parent.get_metadata() + block_root_id = parent_meta.nc_block_root_id + assert block_root_id is not None + + nc_calls: list[Transaction] = [] + for tx in block.iter_transactions_in_this_block(): + if not tx.is_nano_contract(): + # Skip other type of transactions. 
+ continue + tx_meta = tx.get_metadata() + assert tx_meta.nc_execution in {None, NCExecutionState.PENDING} + if tx_meta.voided_by: + assert NC_EXECUTION_FAIL_ID not in tx_meta.voided_by + nc_calls.append(tx) + + if not nc_calls: + meta.nc_block_root_id = block_root_id + context.save(block) + return + + nc_sorted_calls = self._nc_calls_sorter(block, nc_calls) + block_storage = self._nc_storage_factory.get_block_storage(block_root_id) + seed_hasher = hashlib.sha256(block.hash) + + for tx in nc_sorted_calls: + # Compute RNG seed for this transaction + seed_hasher.update(tx.hash) + seed_hasher.update(block_storage.get_root_id()) + rng_seed = seed_hasher.digest() + + result = self.execute_transaction( + tx=tx, + block_storage=block_storage, + rng_seed=rng_seed, + ) + + # Handle the execution result + tx_meta = tx.get_metadata() + match result: + case NCExecutionSuccess(runner=runner): + from hathor.nanocontracts.runner.call_info import CallType + + tx_meta.nc_execution = NCExecutionState.SUCCESS + context.save(tx) + + # Commit the runner changes + # TODO Avoid calling multiple commits for the same contract. The best would be + # to call the commit method once per contract per block, just like we do + # for the block_storage. This ensures we will have a clean database with + # no orphan nodes. + runner.commit() + + # Derive call_info, nc_calls, and events from runner + call_info = runner.get_last_call_info() + assert call_info.calls is not None + nc_calls_records = [ + MetaNCCallRecord.from_call_record(call) + for call in call_info.calls if call.type == CallType.PUBLIC + ] + events_list = call_info.nc_logger.__events__ + + # Update metadata with call records + assert tx_meta.nc_calls is None + tx_meta.nc_calls = nc_calls_records + context.save(tx) + + # Update indexes. This must be after metadata is updated. 
+ assert tx.storage is not None + with non_critical_code(self.log): + tx.storage.indexes.non_critical_handle_contract_execution(tx) + + # Pubsub event to indicate execution success + context.nc_exec_success.append(tx) + + # Store events for pubsub + assert context.nc_events is not None + context.nc_events.append((tx, events_list)) + + # Store events in transaction metadata + if events_list: + tx_meta.nc_events = [(event.nc_id, event.data) for event in events_list] + context.save(tx) + + # Save logs + self._nc_log_storage.save_logs(tx, call_info, None) + + case NCExecutionFailure(runner=runner, exception=exception, traceback=tb): + # Log the failure + kwargs: dict[str, Any] = {} + if tx.name: + kwargs['__name'] = tx.name + if self._nc_exec_fail_trace: + kwargs['exc_info'] = True + self.log.info( + 'nc execution failed', + tx=tx.hash.hex(), + error=repr(exception), + cause=repr(exception.__cause__), + **kwargs, + ) + + on_failure(tx) + + # Save logs with exception info + call_info = runner.get_last_call_info() + self._nc_log_storage.save_logs(tx, call_info, (exception, tb)) + + case NCExecutionSkipped(seqnum_update=seqnum_update): + from hathor.nanocontracts.types import Address + + tx_meta.nc_execution = NCExecutionState.SKIPPED + context.save(tx) + + # Update seqnum if needed + if seqnum_update is not None: + nc_address, new_seqnum = seqnum_update + block_storage.set_address_seqnum(Address(nc_address), new_seqnum) + + case _: + assert_never(result) + + # Save block state root id. If nothing happens, it should be the same as its block parent. 
+ block_storage.commit() + assert block_storage.get_root_id() is not None + meta.nc_block_root_id = block_storage.get_root_id() + context.save(block) + + # Log and verify execution states for all transactions + for tx in nc_calls: + tx_meta = tx.get_metadata() + assert tx_meta.nc_execution is not None + self.log.info('nano tx execution status', + blk=block.hash.hex(), + tx=tx.hash.hex(), + execution=tx_meta.nc_execution.value) + match tx_meta.nc_execution: + case NCExecutionState.PENDING: # pragma: no cover + assert False, 'unexpected pending state' # should never happen + case NCExecutionState.SUCCESS: + assert tx_meta.voided_by is None + case NCExecutionState.FAILURE: + assert tx_meta.voided_by == {tx.hash, NC_EXECUTION_FAIL_ID} + case NCExecutionState.SKIPPED: + assert tx_meta.voided_by + assert NC_EXECUTION_FAIL_ID not in tx_meta.voided_by + case _: # pragma: no cover + assert_never(tx_meta.nc_execution) + + def execute_transaction( + self, + *, + tx: Transaction, + block_storage: 'NCBlockStorage', + rng_seed: bytes, + ) -> NCExecutionResult: + """Execute a single NC transaction. + + This method is pure and side-effect free. It does not persist anything, + does not call callbacks, and returns all information needed by the caller + to handle success/failure cases.""" + from hathor.nanocontracts.types import Address + + tx_meta = tx.get_metadata() + if tx_meta.voided_by: + # Skip voided transactions. This might happen if a previous tx in nc_calls fails and + # mark this tx as voided. + # Check if seqnum needs to be updated. 
+ nc_header = tx.get_nano_header() + seqnum = block_storage.get_address_seqnum(Address(nc_header.nc_address)) + seqnum_update: tuple[bytes, int] | None = None + if nc_header.nc_seqnum > seqnum: + seqnum_update = (nc_header.nc_address, nc_header.nc_seqnum) + return NCExecutionSkipped(seqnum_update=seqnum_update) + + runner = self._runner_factory.create( + block_storage=block_storage, + seed=rng_seed, + ) + + try: + runner.execute_from_tx(tx) + + # after the execution we have the latest state in the storage + # and at this point no tokens pending creation + self._verify_sum_after_execution(tx, block_storage) + + except NCFail as e: + return NCExecutionFailure( + runner=runner, + exception=e, + traceback=traceback.format_exc(), + ) + + return NCExecutionSuccess(runner=runner) + + def _verify_sum_after_execution(self, tx: Transaction, block_storage: 'NCBlockStorage') -> None: + """Verify token sums after execution for dynamically created tokens.""" + from hathor.verification.transaction_verifier import TransactionVerifier + try: + token_dict = tx.get_complete_token_info(block_storage) + TransactionVerifier.verify_sum(self._settings, tx, token_dict) + except TokenNotFound as e: + # At this point, any nonexistent token would have made a prior validation fail. For example, if there + # was a withdrawal of a nonexistent token, it would have failed in the balance validation before. 
+ raise AssertionError from e + except Exception as e: + raise NCFail from e diff --git a/hathor/nanocontracts/faux_immutable.py b/hathor/nanocontracts/faux_immutable.py index f40db9417..0587d3b32 100644 --- a/hathor/nanocontracts/faux_immutable.py +++ b/hathor/nanocontracts/faux_immutable.py @@ -42,6 +42,8 @@ def _validate_faux_immutable_meta(name: str, bases: tuple[type, ...], attrs: dic '__doc__', '__init__', '__call__', + '__firstlineno__', + '__static_attributes__', }) | custom_allowed_dunder # pop the attribute so the created class doesn't have it and it isn't inherited @@ -114,7 +116,7 @@ def create_with_shell(cls: Callable[P, T], *args: P.args, **kwargs: P.kwargs) -> shell_type: type[T] = type(name, bases, attrs) # Use it to instantiate the object, init it, and return it. This mimics the default `__call__` behavior. - obj: T = cls.__new__(shell_type) + obj: T = cls.__new__(shell_type) # type: ignore[call-overload] shell_type.__init__(obj, *args, **kwargs) return obj diff --git a/hathor/nanocontracts/fields/dict_container.py b/hathor/nanocontracts/fields/dict_container.py index 6108e579e..84b40f88d 100644 --- a/hathor/nanocontracts/fields/dict_container.py +++ b/hathor/nanocontracts/fields/dict_container.py @@ -203,8 +203,7 @@ def get(self, key: K, /) -> V: def get(self, key: K, default: V | _T | None, /) -> V | _T | None: ... 
- # XXX: `misc` is ignored because mypy thinks this function does not accept all arguments of the second get overload - def get(self, key: K, default: V | _T | None = None, /) -> V | _T | None: # type: ignore[misc] + def get(self, key: K, default: V | _T | None = None, /) -> V | _T | None: # return the value for key if key is in the storage, else default if key in self: return self[key] diff --git a/hathor/nanocontracts/metered_exec.py b/hathor/nanocontracts/metered_exec.py index 5a37d685d..66016f985 100644 --- a/hathor/nanocontracts/metered_exec.py +++ b/hathor/nanocontracts/metered_exec.py @@ -14,7 +14,7 @@ from __future__ import annotations -from typing import Any, Callable, ParamSpec, TypeVar, cast +from typing import Any, Callable, TypeVar, TypeVarTuple, Unpack, cast from structlog import get_logger @@ -23,7 +23,7 @@ logger = get_logger() _T = TypeVar('_T') -_P = ParamSpec('_P') +_Ts = TypeVarTuple('_Ts') # https://docs.python.org/3/library/sys.html#sys.settrace @@ -77,7 +77,7 @@ def exec(self, source: str, /) -> dict[str, Any]: del env['__builtins__'] return env - def call(self, func: Callable[_P, _T], /, *, args: _P.args) -> _T: + def call(self, func: Callable[[Unpack[_Ts]], _T], /, *, args: tuple[Unpack[_Ts]]) -> _T: """ This is equivalent to `func(*args, **kwargs)` but with execution metering and memory limiting. 
""" from hathor import NCFail diff --git a/hathor/nanocontracts/nc_exec_logs.py b/hathor/nanocontracts/nc_exec_logs.py index 21a977060..90d9aa075 100644 --- a/hathor/nanocontracts/nc_exec_logs.py +++ b/hathor/nanocontracts/nc_exec_logs.py @@ -233,6 +233,8 @@ def error(self, message: str, **kwargs: Any) -> None: def __emit_event__(self, data: bytes) -> None: """Emit a custom event from a Nano Contract.""" + if not isinstance(data, bytes): + raise NCFail(f'event data must be of type `bytes`, found `{type(data).__name__}`') if len(data) > MAX_EVENT_SIZE: raise NCFail(f'event data cannot be larger than {MAX_EVENT_SIZE} bytes, is {len(data)}') self.__events__.append(NCEvent(nc_id=self.__nc_id__, data=data)) diff --git a/hathor/nanocontracts/nc_types/dataclass_nc_type.py b/hathor/nanocontracts/nc_types/dataclass_nc_type.py index f16edcb93..5924e595b 100644 --- a/hathor/nanocontracts/nc_types/dataclass_nc_type.py +++ b/hathor/nanocontracts/nc_types/dataclass_nc_type.py @@ -78,9 +78,8 @@ def _from_type(cls, type_: type[D], /, *, type_map: NCType.TypeMap) -> Self: # XXX: the order is important, but `dict` and `fields` should have a stable order values: dict[str, NCType] = {} for field in fields(type_): - values[field.name] = NCType.from_type(field.type, type_map=type_map) - # XXX: ignore arg-type because after using is_dataclass(type_) mypy gets confused about type_'s type - return cls(values, type_) # type: ignore[arg-type] + values[field.name] = NCType.from_type(field.type, type_map=type_map) # type: ignore[arg-type] + return cls(values, type_) @override def _check_value(self, value: D, /, *, deep: bool) -> None: diff --git a/hathor/nanocontracts/nc_types/utils.py b/hathor/nanocontracts/nc_types/utils.py index 0e8799dd3..617b2a9a0 100644 --- a/hathor/nanocontracts/nc_types/utils.py +++ b/hathor/nanocontracts/nc_types/utils.py @@ -191,7 +191,7 @@ def _get_aliased_type(type_: type | UnionType, alias_map: TypeAliasMap) -> tuple final_type = reduce(or_, aliased_args) # = 
type_args[0] | type_args[1] | ... | type_args[N] # XXX: for some reason, only sometimes doing T | None, results in typing.Union instead of types.UnionType assert isinstance(final_type, (UnionType, _UnionGenericAlias)), '| of types results in union' - return final_type, replaced + return final_type, replaced # type: ignore[return-value] # XXX: special case, when going from list -> tuple, we need to add an ellipsis, that is to say, the equivalent # type for `list[T]` is `tuple[T, ...]` diff --git a/hathor/nanocontracts/resources/blueprint.py b/hathor/nanocontracts/resources/blueprint.py index bf6f9d5ed..f2626cb94 100644 --- a/hathor/nanocontracts/resources/blueprint.py +++ b/hathor/nanocontracts/resources/blueprint.py @@ -18,6 +18,7 @@ import typing from typing import TYPE_CHECKING, Any, Optional +import hathor from hathor._openapi.register import register_resource from hathor.api_util import Resource, set_cors from hathor.nanocontracts import types as nc_types @@ -66,6 +67,8 @@ def get_type_name(self, type_: type) -> str: match args: case (_subtype, types.NoneType) | (types.NoneType, _subtype): return self._get_optional_type_name(_subtype) + case (hathor.Address, hathor.ContractId) | (hathor.ContractId, hathor.Address): + return 'CallerId' return self._get_composed_type_name('union', args) case nc_types.SignedData: return self._get_composed_type_name('SignedData', args) diff --git a/hathor/nanocontracts/resources/history.py b/hathor/nanocontracts/resources/history.py index 19be828c4..45d49b7c5 100644 --- a/hathor/nanocontracts/resources/history.py +++ b/hathor/nanocontracts/resources/history.py @@ -19,6 +19,7 @@ from hathor._openapi.register import register_resource from hathor.api_util import Resource, set_cors from hathor.nanocontracts.exception import NanoContractDoesNotExist +from hathor.nanocontracts.resources.on_chain import SortOrder from hathor.transaction.storage.exceptions import TransactionDoesNotExist from hathor.utils.api import ErrorResponse, 
QueryParams, Response @@ -43,7 +44,6 @@ def render_GET(self, request: 'Request') -> bytes: set_cors(request, 'GET') tx_storage = self.manager.tx_storage - assert tx_storage.indexes is not None if tx_storage.indexes.nc_history is None: request.setResponseCode(503) error_response = ErrorResponse(success=False, error='Nano contract history index not initialized') @@ -74,30 +74,46 @@ def render_GET(self, request: 'Request') -> bytes: error_response = ErrorResponse(success=False, error='Nano contract does not exist.') return error_response.json_dumpb() - if params.after: + is_desc = params.order.is_desc() + + if not params.before and not params.after: + iter_history = ( + iter(tx_storage.indexes.nc_history.get_newest(nc_id_bytes)) if is_desc + else iter(tx_storage.indexes.nc_history.get_oldest(nc_id_bytes)) + ) + else: + ref_tx_id_hex = params.before or params.after + assert ref_tx_id_hex is not None + try: - ref_tx = tx_storage.get_transaction(bytes.fromhex(params.after)) - except TransactionDoesNotExist: + ref_tx_id = bytes.fromhex(ref_tx_id_hex) + except ValueError: request.setResponseCode(400) - error_response = ErrorResponse(success=False, error=f'Hash {params.after} is not a transaction hash.') + error_response = ErrorResponse(success=False, error=f'Invalid hash: {ref_tx_id_hex}') return error_response.json_dumpb() - iter_history = iter(tx_storage.indexes.nc_history.get_older(nc_id_bytes, ref_tx)) - # This method returns the iterator including the tx used as `after` - next(iter_history) - elif params.before: try: - ref_tx = tx_storage.get_transaction(bytes.fromhex(params.before)) + ref_tx = tx_storage.get_transaction(ref_tx_id) except TransactionDoesNotExist: - request.setResponseCode(400) - error_response = ErrorResponse(success=False, error=f'Hash {params.before} is not a transaction hash.') + request.setResponseCode(404) + error_response = ErrorResponse(success=False, error=f'Transaction {ref_tx_id_hex} not found.') return error_response.json_dumpb() - 
iter_history = iter(tx_storage.indexes.nc_history.get_newer(nc_id_bytes, ref_tx)) - # This method returns the iterator including the tx used as `before` - next(iter_history) - else: - iter_history = iter(tx_storage.indexes.nc_history.get_newest(nc_id_bytes)) + if is_desc: + iter_getter = tx_storage.indexes.nc_history.get_newer if params.before \ + else tx_storage.indexes.nc_history.get_older + else: + iter_getter = tx_storage.indexes.nc_history.get_older if params.before \ + else tx_storage.indexes.nc_history.get_newer + + iter_history = iter(iter_getter(nc_id_bytes, ref_tx)) + # This method returns the iterator including the tx used as `before` or `after` + try: + next(iter_history) + except StopIteration: + # This can happen if the `ref_tx` is the only tx in the history, in this case the iterator will be + # empty. It's safe to just ignore this and let the loop below handle the empty iterator. + pass count = params.count has_more = False @@ -135,6 +151,7 @@ class NCHistoryParams(QueryParams): after: Optional[str] before: Optional[str] count: int = Field(default=100, lt=500) + order: SortOrder = SortOrder.DESC include_nc_logs: bool = Field(default=False) include_nc_events: bool = Field(default=False) @@ -239,6 +256,14 @@ class NCHistoryResponse(Response): 'schema': { 'type': 'string', } + }, { + 'name': 'order', + 'in': 'query', + 'description': 'Sort order, either "asc" or "desc".', + 'required': False, + 'schema': { + 'type': 'string', + } }, { 'name': 'include_nc_logs', 'in': 'query', diff --git a/hathor/nanocontracts/resources/nc_creation.py b/hathor/nanocontracts/resources/nc_creation.py index 9eadea29a..b9a8277c1 100644 --- a/hathor/nanocontracts/resources/nc_creation.py +++ b/hathor/nanocontracts/resources/nc_creation.py @@ -38,7 +38,6 @@ def __init__(self, manager: HathorManager) -> None: super().__init__() self.manager = manager self.tx_storage = self.manager.tx_storage - assert self.tx_storage.indexes is not None self.nc_creation_index = 
self.tx_storage.indexes.nc_creation self.nc_history_index = self.tx_storage.indexes.nc_history self.bp_history_index = self.tx_storage.indexes.blueprint_history diff --git a/hathor/nanocontracts/resources/on_chain.py b/hathor/nanocontracts/resources/on_chain.py index 5de787a04..689846b86 100644 --- a/hathor/nanocontracts/resources/on_chain.py +++ b/hathor/nanocontracts/resources/on_chain.py @@ -44,7 +44,6 @@ def render_GET(self, request: Request) -> bytes: set_cors(request, 'GET') tx_storage = self.manager.tx_storage - assert tx_storage.indexes is not None if tx_storage.indexes.blueprints is None: request.setResponseCode(503) error_response = ErrorResponse(success=False, error='Blueprint index not initialized') diff --git a/hathor/nanocontracts/resources/state.py b/hathor/nanocontracts/resources/state.py index 08f242e94..96696facb 100644 --- a/hathor/nanocontracts/resources/state.py +++ b/hathor/nanocontracts/resources/state.py @@ -84,15 +84,6 @@ def render_GET(self, request: 'Request') -> bytes: if params.block_height is not None: # Get hash of the block with the height - if self.manager.tx_storage.indexes is None: - # No indexes enabled in the storage - request.setResponseCode(503) - error_response = ErrorResponse( - success=False, - error='No indexes enabled in the storage, so we can\'t filter by block height.' - ) - return error_response.json_dumpb() - block_hash = self.manager.tx_storage.indexes.height.get(params.block_height) if block_hash is None: # No block hash was found with this height @@ -103,15 +94,6 @@ def render_GET(self, request: 'Request') -> bytes: ) return error_response.json_dumpb() elif params.timestamp is not None: - if self.manager.tx_storage.indexes is None: - # No indexes enabled in the storage - request.setResponseCode(503) - error_response = ErrorResponse( - success=False, - error='No indexes enabled in the storage, so we can\'t filter by timestamp.' 
- ) - return error_response.json_dumpb() - block_hashes, has_more = self.manager.tx_storage.indexes.sorted_blocks.get_older( timestamp=params.timestamp, hash_bytes=None, diff --git a/hathor/nanocontracts/runner/runner.py b/hathor/nanocontracts/runner/runner.py index 662281fee..591a3c438 100644 --- a/hathor/nanocontracts/runner/runner.py +++ b/hathor/nanocontracts/runner/runner.py @@ -1451,7 +1451,12 @@ def __init__( self.tx_storage = tx_storage self.nc_storage_factory = nc_storage_factory - def create(self, *, block_storage: NCBlockStorage, seed: bytes | None = None) -> Runner: + def create( + self, + *, + block_storage: NCBlockStorage, + seed: bytes | None = None, + ) -> Runner: return Runner( reactor=self.reactor, settings=self.settings, diff --git a/hathor/nanocontracts/sorter/random_sorter.py b/hathor/nanocontracts/sorter/random_sorter.py index e53db038d..2dd0a1186 100644 --- a/hathor/nanocontracts/sorter/random_sorter.py +++ b/hathor/nanocontracts/sorter/random_sorter.py @@ -154,9 +154,9 @@ def get_node(self, id_: VertexId) -> SorterNode: self.db[id_] = vertex return vertex - def get_vertices_with_no_outgoing_edges(self) -> list[VertexId]: + def get_vertices_with_no_outgoing_edges(self) -> SortedSet[VertexId]: """Get all vertices with no outgoing edges.""" - return sorted(v.id for v in self.db.values() if not v.outgoing_edges) + return SortedSet(v.id for v in self.db.values() if not v.outgoing_edges) def generate_random_topological_order(self, seed: bytes) -> list[VertexId]: """Generate a random topological order according to the DAG. 
@@ -169,7 +169,7 @@ def generate_random_topological_order(self, seed: bytes) -> list[VertexId]: rng = NanoRNG(seed) - candidates = SortedSet(self.get_vertices_with_no_outgoing_edges()) + candidates = self.get_vertices_with_no_outgoing_edges() ret = [] for i in range(len(self.db)): assert len(candidates) > 0, 'empty candidates, probably caused by circular dependencies in the graph' diff --git a/hathor/nanocontracts/types.py b/hathor/nanocontracts/types.py index 0dfb4954d..3d1835988 100644 --- a/hathor/nanocontracts/types.py +++ b/hathor/nanocontracts/types.py @@ -32,6 +32,7 @@ from hathor.nanocontracts.exception import BlueprintSyntaxError, NCSerializationError from hathor.nanocontracts.faux_immutable import FauxImmutableMeta from hathor.serialization import SerializationError +from hathor.transaction.scripts.opcode import OpcodesVersion from hathor.transaction.util import bytes_to_int, get_deposit_token_withdraw_amount, int_to_bytes from hathor.utils.typing import InnerTypeMixin @@ -45,7 +46,7 @@ # Types to be used by blueprints. 
-class Address(bytes, metaclass=FauxImmutableMeta): # type: ignore[misc] +class Address(bytes, metaclass=FauxImmutableMeta): __allow_faux_inheritance__ = True __allow_faux_dunder__ = ('__str__', '__repr__') __slots__ = () @@ -66,27 +67,27 @@ def __repr__(self) -> str: return f"Address.from_str({encoded_address!r})" -class VertexId(bytes, metaclass=FauxImmutableMeta): # type: ignore[misc] +class VertexId(bytes, metaclass=FauxImmutableMeta): __slots__ = () __allow_faux_inheritance__ = True -class BlueprintId(VertexId): # type: ignore[misc] +class BlueprintId(VertexId): __slots__ = () __allow_faux_inheritance__ = True -class ContractId(VertexId): # type: ignore[misc] +class ContractId(VertexId): __slots__ = () __allow_faux_inheritance__ = True -class TokenUid(bytes, metaclass=FauxImmutableMeta): # type: ignore[misc] +class TokenUid(bytes, metaclass=FauxImmutableMeta): __slots__ = () __allow_faux_inheritance__ = True -class TxOutputScript(bytes, metaclass=FauxImmutableMeta): # type: ignore[misc] +class TxOutputScript(bytes, metaclass=FauxImmutableMeta): __slots__ = () __allow_faux_inheritance__ = True @@ -161,12 +162,10 @@ def checksig(self, script: bytes) -> bool: """Check if `self.script_input` satisfies the provided script.""" from hathor.transaction.exceptions import ScriptError from hathor.transaction.scripts import ScriptExtras - from hathor.transaction.scripts.execute import execute_eval - full_data = self.script_input + script - log: list[str] = [] - extras = ScriptExtras(tx=self) # type: ignore[arg-type] + from hathor.transaction.scripts.execute import raw_script_eval + extras = ScriptExtras(tx=self, version=OpcodesVersion.V2) # type: ignore[arg-type] try: - execute_eval(full_data, log, extras) + raw_script_eval(input_data=self.script_input, output_script=script, extras=extras) except ScriptError: return False else: diff --git a/hathor/nanocontracts/utils.py b/hathor/nanocontracts/utils.py index 9524eb6f7..982f955ad 100644 --- a/hathor/nanocontracts/utils.py 
+++ b/hathor/nanocontracts/utils.py @@ -16,14 +16,13 @@ import hashlib from types import ModuleType -from typing import Callable, assert_never +from typing import Any, Callable from cryptography.exceptions import InvalidSignature from cryptography.hazmat.primitives import hashes from cryptography.hazmat.primitives.asymmetric import ec from pycoin.key.Key import Key as PycoinKey -from hathor.conf.settings import HathorSettings, NanoContractsSetting from hathor.crypto.util import ( decode_address, get_address_from_public_key_bytes, @@ -31,10 +30,7 @@ get_public_key_from_bytes_compressed, is_pubkey_compressed, ) -from hathor.feature_activation.feature import Feature -from hathor.feature_activation.feature_service import FeatureService from hathor.nanocontracts.types import NC_METHOD_TYPE_ATTR, BlueprintId, ContractId, NCMethodType, TokenUid, VertexId -from hathor.transaction import Block from hathor.transaction.headers import NanoHeader from hathor.util import not_none @@ -152,19 +148,6 @@ def sign_openssl_multisig( nano_header.nc_script = MultiSig.create_input_data(redeem_script, signatures) -def is_nano_active(*, settings: HathorSettings, block: Block, feature_service: FeatureService) -> bool: - """Return whether the Nano Contracts feature is active according to the provided settings and block.""" - match settings.ENABLE_NANO_CONTRACTS: - case NanoContractsSetting.DISABLED: - return False - case NanoContractsSetting.ENABLED: - return True - case NanoContractsSetting.FEATURE_ACTIVATION: - return feature_service.is_feature_active(vertex=block, feature=Feature.NANO_CONTRACTS) - case _: # pragma: no cover - assert_never(settings.ENABLE_NANO_CONTRACTS) - - def sha3(data: bytes) -> bytes: """Calculate the SHA3-256 of some data.""" return hashlib.sha3_256(data).digest() @@ -186,3 +169,32 @@ def verify_ecdsa(public_key: bytes, data: bytes, signature: bytes) -> bool: return True except InvalidSignature: return False + + +def json_dumps( + obj: object, + *, + ensure_ascii: 
bool = True, + indent: int | str | None = None, + separators: tuple[str, str] | None = (',', ':'), + sort_keys: bool = False, +) -> str: + """ + Serialize obj as a JSON. Arguments are a subset of Python's `json.dumps`. + It automatically converts `bytes`-like values to their hex representation. + """ + import json + + def dump_bytes(data: Any) -> str: + if isinstance(data, bytes): + return data.hex() + raise TypeError(f'Object of type {type(data).__name__} is not JSON serializable') + + return json.dumps( + obj, + ensure_ascii=ensure_ascii, + indent=indent, + separators=separators, + sort_keys=sort_keys, + default=dump_bytes, + ) diff --git a/hathor/p2p/manager.py b/hathor/p2p/manager.py index 422deb7eb..44dacdcf0 100644 --- a/hathor/p2p/manager.py +++ b/hathor/p2p/manager.py @@ -259,11 +259,6 @@ def set_manager(self, manager: 'HathorManager') -> None: raise TypeError('Class built incorrectly without any enabled sync version') self.manager = manager - if self.is_sync_version_available(SyncVersion.V2): - assert self.manager.tx_storage.indexes is not None - indexes = self.manager.tx_storage.indexes - self.log.debug('enable sync-v2 indexes') - indexes.enable_mempool_index() def add_listen_address_description(self, addr: str) -> None: """Add address to listen for incoming connections.""" diff --git a/hathor/p2p/sync_v2/agent.py b/hathor/p2p/sync_v2/agent.py index 27957d9e2..08f6f2ba9 100644 --- a/hathor/p2p/sync_v2/agent.py +++ b/hathor/p2p/sync_v2/agent.py @@ -181,8 +181,6 @@ def __init__( def get_status(self) -> dict[str, Any]: """ Return the status of the sync. 
""" - assert self.tx_storage.indexes is not None - assert self.tx_storage.indexes.mempool_tips is not None tips = self.tx_storage.indexes.mempool_tips.get() tips_limited, tips_has_more = collect_n(iter(tips), MAX_MEMPOOL_STATUS_TIPS) res = { @@ -361,7 +359,6 @@ def run_sync_blocks(self) -> Generator[Any, Any, bool]: Notice that we might already have all other peer's blocks while the other peer is still syncing. """ - assert self.tx_storage.indexes is not None self.state = PeerState.SYNCING_BLOCKS # Get my best block. @@ -462,8 +459,6 @@ def send_get_tips(self) -> None: def handle_get_tips(self, _payload: str) -> None: """ Handle a GET-TIPS message. """ - assert self.tx_storage.indexes is not None - assert self.tx_storage.indexes.mempool_tips is not None if self._is_streaming: self.log.warn('can\'t send while streaming') # XXX: or can we? self.send_message(ProtocolMessages.MEMPOOL_END) @@ -641,7 +636,6 @@ def send_get_peer_block_hashes(self, heights: list[int]) -> None: def handle_get_peer_block_hashes(self, payload: str) -> None: """ Handle a GET-PEER-BLOCK-HASHES message. """ - assert self.tx_storage.indexes is not None heights = json.loads(payload) if len(heights) > 20: self.log.info('too many heights', heights_qty=len(heights)) diff --git a/hathor/p2p/sync_v2/streamers.py b/hathor/p2p/sync_v2/streamers.py index b7c4f7363..9dfa2a220 100644 --- a/hathor/p2p/sync_v2/streamers.py +++ b/hathor/p2p/sync_v2/streamers.py @@ -282,7 +282,7 @@ def send_next(self) -> None: # Skip blocks. if cur.is_block: - self.bfs.skip_neighbors(cur) + self.bfs.skip_neighbors() return assert isinstance(cur, Transaction) @@ -291,6 +291,7 @@ def send_next(self) -> None: if cur_metadata.first_block is None: self.log.debug('reached a tx that is not confirmed, stopping streaming') self.sync_agent.stop_tx_streaming_server(StreamEnd.TX_NOT_CONFIRMED) + self.bfs.add_neighbors() return # Check if tx is confirmed by the `self.current_block` or any next block. 
@@ -299,7 +300,7 @@ def send_next(self) -> None: first_block = self.tx_storage.get_block(cur_metadata.first_block) if not_none(first_block.static_metadata.height) < not_none(self.current_block.static_metadata.height): self.log.debug('skipping tx: out of current block') - self.bfs.skip_neighbors(cur) + self.bfs.skip_neighbors() return self.log.debug('send next transaction', tx_id=cur.hash.hex()) @@ -309,4 +310,4 @@ def send_next(self) -> None: if self.counter >= self.limit: self.log.debug('limit exceeded, stopping streaming') self.sync_agent.stop_tx_streaming_server(StreamEnd.LIMIT_EXCEEDED) - return + self.bfs.add_neighbors() diff --git a/hathor/p2p/sync_v2/transaction_streaming_client.py b/hathor/p2p/sync_v2/transaction_streaming_client.py index e4a2f7925..92402cd2d 100644 --- a/hathor/p2p/sync_v2/transaction_streaming_client.py +++ b/hathor/p2p/sync_v2/transaction_streaming_client.py @@ -18,6 +18,7 @@ from structlog import get_logger from twisted.internet.defer import Deferred, inlineCallbacks +from hathor.feature_activation.utils import Features from hathor.p2p.sync_v2.exception import ( InvalidVertexError, StreamingError, @@ -27,6 +28,7 @@ from hathor.p2p.sync_v2.streamers import StreamEnd from hathor.transaction import BaseTransaction, Transaction from hathor.transaction.exceptions import HathorError, TxValidationError +from hathor.transaction.scripts.opcode import OpcodesVersion from hathor.types import VertexId from hathor.verification.verification_params import VerificationParams @@ -53,8 +55,13 @@ def __init__(self, # We can also set the `nc_block_root_id` to `None` because we only call `verify_basic`, # which doesn't need it. 
self.verification_params = VerificationParams( - enable_checkdatasig_count=False, nc_block_root_id=None, + features=Features( + count_checkdatasig_op=False, + nanocontracts=False, + fee_tokens=False, + opcodes_version=OpcodesVersion.V1, + ) ) self.reactor = sync_agent.reactor diff --git a/hathor/p2p/utils.py b/hathor/p2p/utils.py index 55f9b9591..50b7718c7 100644 --- a/hathor/p2p/utils.py +++ b/hathor/p2p/utils.py @@ -12,8 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. -import datetime import re +from datetime import UTC, datetime, timedelta from typing import Any, Optional import requests @@ -111,8 +111,8 @@ def generate_certificate(private_key: RSAPrivateKey, ca_file: str, ca_pkey_file: ]) builder = builder.subject_name(subject) - builder = builder.not_valid_before(datetime.datetime.utcnow() - datetime.timedelta(hours=1)) - builder = builder.not_valid_after(datetime.datetime.utcnow() + datetime.timedelta(hours=24*365*100)) + builder = builder.not_valid_before(datetime.now(UTC) - timedelta(hours=1)) + builder = builder.not_valid_after(datetime.now(UTC) + timedelta(hours=24*365*100)) builder = builder.serial_number(x509.random_serial_number()) builder = builder.public_key(public_key) diff --git a/hathor/pubsub.py b/hathor/pubsub.py index 95f0f2bb7..a6f208818 100644 --- a/hathor/pubsub.py +++ b/hathor/pubsub.py @@ -36,6 +36,10 @@ class HathorEvents(Enum): """ + NETWORK_NEW_TX_PROCESSING: + Triggered when a new tx/block is received and will begin processing, just before consensus + Publishes a tx/block object + NETWORK_NEW_TX_ACCEPTED: Triggered when a new tx/block is accepted in the network Publishes a tx/block object @@ -118,6 +122,8 @@ class HathorEvents(Enum): NETWORK_PEER_DISCONNECTED = 'network:peer_disconnected' + NETWORK_NEW_TX_PROCESSING = 'network:new_tx_processing' + NETWORK_NEW_TX_ACCEPTED = 'network:new_tx_accepted' CONSENSUS_TX_UPDATE = 'consensus:tx_update' diff --git 
a/hathor/reactor/reactor.py b/hathor/reactor/reactor.py index b92c80062..49af3f3fb 100644 --- a/hathor/reactor/reactor.py +++ b/hathor/reactor/reactor.py @@ -60,7 +60,13 @@ def initialize_global_reactor(*, use_asyncio_reactor: bool = False) -> ReactorPr from twisted.internet.error import ReactorAlreadyInstalledError try: - asyncioreactor.install(asyncio.get_event_loop()) + loop = asyncio.get_running_loop() + except RuntimeError: + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + + try: + asyncioreactor.install(loop) except ReactorAlreadyInstalledError as e: msg = ( "There's a Twisted reactor installed already. It's probably the default one, installed indirectly by " diff --git a/hathor/simulator/utils.py b/hathor/simulator/utils.py index 9afe6c464..309c467c6 100644 --- a/hathor/simulator/utils.py +++ b/hathor/simulator/utils.py @@ -53,7 +53,7 @@ def gen_new_tx(manager: HathorManager, address: str, value: int) -> Transaction: def add_new_blocks( manager: HathorManager, num_blocks: int, - advance_clock: Optional[int] = None, + advance_clock: int = 1, *, parent_block_hash: Optional[VertexId] = None, block_data: bytes = b'', @@ -85,7 +85,7 @@ def add_new_blocks( def add_new_block( manager: HathorManager, - advance_clock: Optional[int] = None, + advance_clock: int = 1, *, parent_block_hash: Optional[VertexId] = None, data: bytes = b'', diff --git a/hathor/transaction/base_transaction.py b/hathor/transaction/base_transaction.py index 165ffc200..37392b12b 100644 --- a/hathor/transaction/base_transaction.py +++ b/hathor/transaction/base_transaction.py @@ -744,6 +744,7 @@ def update_accumulated_weight( work += weight_to_work(tx.weight) if stop_value is not None and work > stop_value: break + bfs_walk.add_neighbors() metadata.accumulated_weight = work if save_file: diff --git a/hathor/transaction/block.py b/hathor/transaction/block.py index 64a3aabab..99f8d3abd 100644 --- a/hathor/transaction/block.py +++ b/hathor/transaction/block.py @@ -369,10 +369,11 @@ 
def iter_transactions_in_this_block(self) -> Iterator[Transaction]: for tx in bfs.run(self, skip_root=True): tx_meta = tx.get_metadata() if tx_meta.first_block != self.hash: - bfs.skip_neighbors(tx) + bfs.skip_neighbors() continue assert isinstance(tx, Transaction) yield tx + bfs.add_neighbors() @override def init_static_metadata_from_storage(self, settings: HathorSettings, storage: 'TransactionStorage') -> None: diff --git a/hathor/transaction/headers/nano_header.py b/hathor/transaction/headers/nano_header.py index 55bf608b0..cf49b94dc 100644 --- a/hathor/transaction/headers/nano_header.py +++ b/hathor/transaction/headers/nano_header.py @@ -251,7 +251,7 @@ def get_contract_id(self) -> ContractId: return ContractId(VertexId(self.tx.hash)) return ContractId(VertexId(self.nc_id)) - def get_blueprint_id(self, block: Block | None = None, *, accept_failed_execution: bool = False) -> BlueprintId: + def get_blueprint_id(self, block: Block | None = None) -> BlueprintId: """Return the blueprint id.""" from hathor.nanocontracts.exception import NanoContractDoesNotExist from hathor.nanocontracts.types import BlueprintId, ContractId, VertexId as NCVertexId @@ -294,12 +294,23 @@ def get_blueprint_id(self, block: Block | None = None, *, accept_failed_executio # otherwise, it failed or skipped execution from hathor.transaction.nc_execution_state import NCExecutionState assert nc_creation_meta.nc_execution in (NCExecutionState.FAILURE, NCExecutionState.SKIPPED) - if not accept_failed_execution: - raise NanoContractDoesNotExist(f'contract creation is not executed: {self.nc_id.hex()}') + raise NanoContractDoesNotExist(f'contract creation is not executed: {self.nc_id.hex()}') blueprint_id = BlueprintId(NCVertexId(nc_creation.get_nano_header().nc_id)) return blueprint_id + def get_blueprint_id_for_json(self, block: Block | None = None) -> BlueprintId: + """ + Return the blueprint id for json use. 
+ This is equivalent to `get_blueprint_id`, but on error it returns an empty id instead of failing. + """ + from hathor.nanocontracts.exception import NanoContractDoesNotExist + from hathor.nanocontracts.types import BlueprintId + try: + return self.get_blueprint_id(block) + except NanoContractDoesNotExist: + return BlueprintId(b'') + def get_actions(self) -> list[NCAction]: """Get a list of NCActions from the header actions.""" return [header_action.to_nc_action(self.tx) for header_action in self.nc_actions] diff --git a/hathor/transaction/json_serializer.py b/hathor/transaction/json_serializer.py index f6046c53d..bae88121d 100644 --- a/hathor/transaction/json_serializer.py +++ b/hathor/transaction/json_serializer.py @@ -131,7 +131,7 @@ def decode_nc_args(self, tx: 'Transaction') -> Any: blueprint_id = BlueprintId(meta.nc_calls[0].blueprint_id) else: # Get blueprint_id from NanoHeader - blueprint_id = nano_header.get_blueprint_id(accept_failed_execution=True) + blueprint_id = nano_header.get_blueprint_id_for_json() try: blueprint_class = self.tx_storage.get_blueprint_class(blueprint_id) diff --git a/hathor/transaction/resources/block_at_height.py b/hathor/transaction/resources/block_at_height.py index 9d6577ded..60dd4f062 100644 --- a/hathor/transaction/resources/block_at_height.py +++ b/hathor/transaction/resources/block_at_height.py @@ -44,8 +44,6 @@ def render_GET(self, request: 'Request') -> bytes: :rtype: string (json) """ - assert self.manager.tx_storage.indexes is not None - request.setHeader(b'content-type', b'application/json; charset=utf-8') set_cors(request, 'GET') diff --git a/hathor/transaction/resources/create_tx.py b/hathor/transaction/resources/create_tx.py index 2cae26f91..dbc58af52 100644 --- a/hathor/transaction/resources/create_tx.py +++ b/hathor/transaction/resources/create_tx.py @@ -22,6 +22,7 @@ from hathor.transaction import Transaction, TxInput, TxOutput from hathor.transaction.scripts import create_output_script from hathor.util import 
api_catch_exceptions, json_dumpb, json_loadb +from hathor.verification.verification_params import VerificationParams def from_raw_output(raw_output: dict, tokens: list[bytes]) -> TxOutput: @@ -116,13 +117,14 @@ def _verify_unsigned_skip_pow(self, tx: Transaction) -> None: verifiers.tx.verify_output_token_indexes(tx) verifiers.vertex.verify_sigops_output(tx, enable_checkdatasig_count=True) verifiers.tx.verify_sigops_input(tx, enable_checkdatasig_count=True) + best_block = self.manager.tx_storage.get_best_block() + params = VerificationParams.default_for_mempool(best_block=best_block) # need to run verify_inputs first to check if all inputs exist - verifiers.tx.verify_inputs(tx, skip_script=True) + verifiers.tx.verify_inputs(tx, params, skip_script=True) verifiers.vertex.verify_parents(tx) - best_block = self.manager.tx_storage.get_best_block() block_storage = self.manager.get_nc_block_storage(best_block) - verifiers.tx.verify_sum(self.manager._settings, tx.get_complete_token_info(block_storage)) + verifiers.tx.verify_sum(self.manager._settings, tx, tx.get_complete_token_info(block_storage)) CreateTxResource.openapi = { diff --git a/hathor/transaction/resources/mempool.py b/hathor/transaction/resources/mempool.py index 0328032a9..bd2e6059c 100644 --- a/hathor/transaction/resources/mempool.py +++ b/hathor/transaction/resources/mempool.py @@ -85,12 +85,8 @@ def render_GET(self, request: 'Request') -> bytes: return json_dumpb(data) def _get_from_index(self, index_source: IndexSource) -> Iterator[Transaction]: - tx_storage = self.manager.tx_storage - assert tx_storage.indexes is not None if index_source == IndexSource.ANY or index_source == IndexSource.MEMPOOL: - # XXX: if source is ANY we try to use the mempool when possible - if tx_storage.indexes.mempool_tips is None: - raise ValueError('mempool index is not enabled') + # XXX: if source is ANY we try to use the mempool yield from self._get_from_mempool_tips_index() elif index_source == IndexSource.TX_TIPS: raise 
ValueError('tx-tips index has been removed') @@ -99,8 +95,6 @@ def _get_from_index(self, index_source: IndexSource) -> Iterator[Transaction]: def _get_from_mempool_tips_index(self) -> Iterator[Transaction]: tx_storage = self.manager.tx_storage - assert tx_storage.indexes is not None - assert tx_storage.indexes.mempool_tips is not None yield from tx_storage.indexes.mempool_tips.iter_all(tx_storage) diff --git a/hathor/transaction/resources/transaction.py b/hathor/transaction/resources/transaction.py index 6f22a4437..cc46c07f8 100644 --- a/hathor/transaction/resources/transaction.py +++ b/hathor/transaction/resources/transaction.py @@ -58,8 +58,6 @@ def get_tx_extra_data( Returns success, tx serializes, metadata and spent outputs """ assert tx.storage is not None - assert tx.storage.indexes is not None - settings = get_global_settings() serialized = tx.to_json(decode_script=True) serialized['raw'] = tx.get_struct().hex() diff --git a/hathor/transaction/resources/utxo_search.py b/hathor/transaction/resources/utxo_search.py index 51d04e312..32a4dad14 100644 --- a/hathor/transaction/resources/utxo_search.py +++ b/hathor/transaction/resources/utxo_search.py @@ -62,7 +62,6 @@ def render_GET(self, request: 'Request') -> bytes: # setup tx_storage = self.manager.tx_storage - assert tx_storage.indexes is not None if tx_storage.indexes.utxo is None: request.setResponseCode(503) return json_dumpb({'success': False}) diff --git a/hathor/transaction/scripts/execute.py b/hathor/transaction/scripts/execute.py index b19ab6c0a..1b393712d 100644 --- a/hathor/transaction/scripts/execute.py +++ b/hathor/transaction/scripts/execute.py @@ -12,17 +12,23 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from __future__ import annotations + import struct from dataclasses import dataclass -from typing import NamedTuple, Optional, Union +from typing import TYPE_CHECKING, NamedTuple, Optional, Union from hathor.transaction import BaseTransaction, Transaction, TxInput from hathor.transaction.exceptions import DataIndexError, FinalStackInvalid, InvalidScriptError, OutOfData +if TYPE_CHECKING: + from hathor.transaction.scripts.opcode import OpcodesVersion + @dataclass(slots=True, frozen=True, kw_only=True) class ScriptExtras: tx: Transaction + version: OpcodesVersion @dataclass(slots=True, frozen=True, kw_only=True) @@ -72,7 +78,7 @@ def execute_eval(data: bytes, log: list[str], extras: ScriptExtras) -> None: continue # this is an opcode manipulating the stack - execute_op_code(Opcode(opcode), context) + execute_op_code(Opcode(opcode), context, extras.version) evaluate_final_stack(stack, log) @@ -94,7 +100,7 @@ def evaluate_final_stack(stack: Stack, log: list[str]) -> None: raise FinalStackInvalid('\n'.join(log)) -def script_eval(tx: Transaction, txin: TxInput, spent_tx: BaseTransaction) -> None: +def script_eval(tx: Transaction, txin: TxInput, spent_tx: BaseTransaction, version: OpcodesVersion) -> None: """Evaluates the output script and input data according to a very limited subset of Bitcoin's scripting language. 
@@ -112,7 +118,7 @@ def script_eval(tx: Transaction, txin: TxInput, spent_tx: BaseTransaction) -> No raw_script_eval( input_data=txin.data, output_script=spent_tx.outputs[txin.index].script, - extras=UtxoScriptExtras(tx=tx, txin=txin, spent_tx=spent_tx), + extras=UtxoScriptExtras(tx=tx, txin=txin, spent_tx=spent_tx, version=version), ) diff --git a/hathor/transaction/scripts/multi_sig.py b/hathor/transaction/scripts/multi_sig.py index 1fabd943a..e09fc3b2d 100644 --- a/hathor/transaction/scripts/multi_sig.py +++ b/hathor/transaction/scripts/multi_sig.py @@ -143,7 +143,7 @@ def parse_script(cls, script: bytes) -> Optional['MultiSig']: timelock = None pushdata_timelock = groups[0] if pushdata_timelock: - timelock_bytes = pushdata_timelock[1:] + timelock_bytes = get_pushdata(pushdata_timelock) timelock = struct.unpack('!I', timelock_bytes)[0] redeem_script_hash = get_pushdata(groups[1]) address_b58 = get_address_b58_from_redeem_script_hash(redeem_script_hash) diff --git a/hathor/transaction/scripts/opcode.py b/hathor/transaction/scripts/opcode.py index eddaecfbb..0f39424fb 100644 --- a/hathor/transaction/scripts/opcode.py +++ b/hathor/transaction/scripts/opcode.py @@ -15,6 +15,7 @@ import datetime import struct from enum import IntEnum +from typing import Callable from cryptography.exceptions import InvalidSignature from cryptography.hazmat.primitives import hashes @@ -48,6 +49,11 @@ from hathor.transaction.scripts.script_context import ScriptContext +class OpcodesVersion(IntEnum): + V1 = 1 + V2 = 2 + + class Opcode(IntEnum): OP_0 = 0x50 OP_1 = 0x51 @@ -626,7 +632,7 @@ def op_integer(opcode: int, stack: Stack) -> None: raise ScriptError(e) from e -def execute_op_code(opcode: Opcode, context: ScriptContext) -> None: +def execute_op_code(opcode: Opcode, context: ScriptContext, version: OpcodesVersion) -> None: """ Execute a function opcode. 
@@ -635,17 +641,27 @@ def execute_op_code(opcode: Opcode, context: ScriptContext) -> None: context: the script context to be manipulated. """ context.logs.append(f'Executing function opcode {opcode.name} ({hex(opcode.value)})') - match opcode: - case Opcode.OP_DUP: op_dup(context) - case Opcode.OP_EQUAL: op_equal(context) - case Opcode.OP_EQUALVERIFY: op_equalverify(context) - case Opcode.OP_CHECKSIG: op_checksig(context) - case Opcode.OP_HASH160: op_hash160(context) - case Opcode.OP_GREATERTHAN_TIMESTAMP: op_greaterthan_timestamp(context) - case Opcode.OP_CHECKMULTISIG: op_checkmultisig(context) - case Opcode.OP_DATA_STREQUAL: op_data_strequal(context) - case Opcode.OP_DATA_GREATERTHAN: op_data_greaterthan(context) - case Opcode.OP_DATA_MATCH_VALUE: op_data_match_value(context) - case Opcode.OP_CHECKDATASIG: op_checkdatasig(context) - case Opcode.OP_FIND_P2PKH: op_find_p2pkh(context) - case _: raise ScriptError(f'unknown opcode: {opcode}') + opcode_fns: dict[Opcode, Callable[[ScriptContext], None]] = { + Opcode.OP_DUP: op_dup, + Opcode.OP_EQUAL: op_equal, + Opcode.OP_EQUALVERIFY: op_equalverify, + Opcode.OP_CHECKSIG: op_checksig, + Opcode.OP_HASH160: op_hash160, + Opcode.OP_GREATERTHAN_TIMESTAMP: op_greaterthan_timestamp, + Opcode.OP_CHECKMULTISIG: op_checkmultisig, + } + + if version == OpcodesVersion.V1: + opcode_fns.update({ + Opcode.OP_DATA_STREQUAL: op_data_strequal, + Opcode.OP_DATA_GREATERTHAN: op_data_greaterthan, + Opcode.OP_DATA_MATCH_VALUE: op_data_match_value, + Opcode.OP_CHECKDATASIG: op_checkdatasig, + Opcode.OP_FIND_P2PKH: op_find_p2pkh, + }) + + opcode_fn = opcode_fns.get(opcode) + if opcode_fn is None: + raise ScriptError(f'unknown opcode: {opcode}') + + opcode_fn(context) diff --git a/hathor/transaction/scripts/p2pkh.py b/hathor/transaction/scripts/p2pkh.py index 9358098df..0be24bb6d 100644 --- a/hathor/transaction/scripts/p2pkh.py +++ b/hathor/transaction/scripts/p2pkh.py @@ -119,7 +119,7 @@ def parse_script(cls, script: bytes) -> 
Optional['P2PKH']: timelock = None pushdata_timelock = groups[0] if pushdata_timelock: - timelock_bytes = pushdata_timelock[1:] + timelock_bytes = get_pushdata(pushdata_timelock) timelock = struct.unpack('!I', timelock_bytes)[0] pushdata_address = groups[1] public_key_hash = get_pushdata(pushdata_address) diff --git a/hathor/transaction/storage/__init__.py b/hathor/transaction/storage/__init__.py index c0a060722..35050461d 100644 --- a/hathor/transaction/storage/__init__.py +++ b/hathor/transaction/storage/__init__.py @@ -12,14 +12,12 @@ # See the License for the specific language governing permissions and # limitations under the License. -from hathor.transaction.storage.cache_storage import TransactionCacheStorage from hathor.transaction.storage.rocksdb_storage import TransactionRocksDBStorage from hathor.transaction.storage.transaction_storage import TransactionStorage from hathor.transaction.storage.vertex_storage_protocol import VertexStorageProtocol __all__ = [ 'TransactionStorage', - 'TransactionCacheStorage', 'TransactionRocksDBStorage', 'VertexStorageProtocol' ] diff --git a/hathor/transaction/storage/cache_storage.py b/hathor/transaction/storage/cache_storage.py deleted file mode 100644 index 92ed984fd..000000000 --- a/hathor/transaction/storage/cache_storage.py +++ /dev/null @@ -1,270 +0,0 @@ -# Copyright 2021 Hathor Labs -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from __future__ import annotations - -from collections import OrderedDict -from typing import TYPE_CHECKING, Any, Iterator, Optional - -from twisted.internet import threads -from typing_extensions import override - -from hathor.indexes import IndexesManager -from hathor.reactor import ReactorProtocol as Reactor -from hathor.transaction import BaseTransaction -from hathor.transaction.storage.migrations import MigrationState -from hathor.transaction.storage.transaction_storage import BaseTransactionStorage -from hathor.transaction.storage.tx_allow_scope import TxAllowScope -from hathor.transaction.vertex_children import VertexChildrenService - -if TYPE_CHECKING: - from hathor.conf.settings import HathorSettings - from hathor.nanocontracts.storage import NCStorageFactory - - -class TransactionCacheStorage(BaseTransactionStorage): - """Caching storage to be used 'on top' of other storages. - """ - - cache: OrderedDict[bytes, BaseTransaction] - dirty_txs: set[bytes] - - def __init__( - self, - store: 'BaseTransactionStorage', - reactor: Reactor, - interval: int = 5, - capacity: int = 10000, - *, - settings: 'HathorSettings', - nc_storage_factory: NCStorageFactory, - vertex_children_service: VertexChildrenService, - indexes: Optional[IndexesManager], - _clone_if_needed: bool = False, - ) -> None: - """ - :param store: a subclass of BaseTransactionStorage - :type store: :py:class:`hathor.transaction.storage.BaseTransactionStorage` - - :param reactor: Twisted reactor which handles the mainloop and the events. - :type reactor: :py:class:`twisted.internet.Reactor` - - :param interval: the cache flush interval. Writes will happen every interval seconds - :type interval: int - - :param capacity: cache capacity - :type capacity: int - - :param _clone_if_needed: *private parameter*, defaults to True, controls whether to clone - transaction/blocks/metadata when returning those objects. 
- :type _clone_if_needed: bool - """ - if store.indexes is not None: - raise ValueError('internal storage cannot have indexes enabled') - - store.remove_cache() - self.store = store - self.reactor = reactor - self.interval = interval - self.capacity = capacity - self.flush_deferred = None - self._clone_if_needed = _clone_if_needed - self.cache = OrderedDict() - # dirty_txs has the txs that have been modified but are not persisted yet - self.dirty_txs = set() - self.stats = dict(hit=0, miss=0) - - # we need to use only one weakref dict, so we must first initialize super, and then - # attribute the same weakref for both. - super().__init__( - indexes=indexes, - settings=settings, - nc_storage_factory=nc_storage_factory, - vertex_children_service=vertex_children_service, - ) - - self._tx_weakref = store._tx_weakref - # XXX: just to make sure this isn't being used anywhere, setters/getters should be used instead - del self._allow_scope - - def set_allow_scope(self, allow_scope: TxAllowScope) -> None: - self.store._allow_scope = allow_scope - - def get_allow_scope(self) -> TxAllowScope: - return self.store._allow_scope - - def set_capacity(self, capacity: int) -> None: - """Change the max number of items in cache.""" - assert capacity >= 0 - self.capacity = capacity - while len(self.cache) > self.capacity: - self._cache_popitem() - - def _clone(self, x: BaseTransaction) -> BaseTransaction: - if self._clone_if_needed: - return x.clone() - else: - return x - - def get_migration_state(self, migration_name: str) -> MigrationState: - # forward to internal store - return self.store.get_migration_state(migration_name) - - def set_migration_state(self, migration_name: str, state: MigrationState) -> None: - # forward to internal store - self.store.set_migration_state(migration_name, state) - - def pre_init(self) -> None: - # XXX: not calling self.store.pre_init() because it would run `BaseTransactionStorage.pre_init` twice. 
- super().pre_init() - self.reactor.callLater(self.interval, self._start_flush_thread) - - def _enable_weakref(self) -> None: - super()._enable_weakref() - self.store._enable_weakref() - - def _disable_weakref(self) -> None: - super()._disable_weakref() - self.store._disable_weakref() - - def _start_flush_thread(self) -> None: - if self.flush_deferred is None: - deferred = threads.deferToThread(self._flush_to_storage, self.dirty_txs.copy()) - deferred.addCallback(self._cb_flush_thread) - deferred.addErrback(self._err_flush_thread) - self.flush_deferred = deferred - - def _cb_flush_thread(self, flushed_txs: set[bytes]) -> None: - self.reactor.callLater(self.interval, self._start_flush_thread) - self.flush_deferred = None - - def _err_flush_thread(self, reason: Any) -> None: - self.log.error('error flushing transactions', reason=reason) - self.reactor.callLater(self.interval, self._start_flush_thread) - self.flush_deferred = None - - def _flush_to_storage(self, dirty_txs_copy: set[bytes]) -> None: - """Write dirty pages to disk.""" - for tx_hash in dirty_txs_copy: - # a dirty tx might be removed from self.cache outside this thread: if _update_cache is called - # and we need to save the tx to disk immediately. 
So it might happen that the tx which was - # in the dirty set when the flush thread began is not in cache anymore, hence this `if` check - if tx_hash in self.cache: - tx = self._clone(self.cache[tx_hash]) - self.dirty_txs.discard(tx_hash) - self.store._save_transaction(tx) - - def remove_transaction(self, tx: BaseTransaction) -> None: - super().remove_transaction(tx) - self.cache.pop(tx.hash, None) - self.dirty_txs.discard(tx.hash) - self.store.remove_transaction(tx) - self._remove_from_weakref(tx) - - def save_transaction(self, tx: 'BaseTransaction', *, only_metadata: bool = False) -> None: - self._save_transaction(tx) - self._save_to_weakref(tx) - - # call super which adds to index if needed - super().save_transaction(tx, only_metadata=only_metadata) - - @override - def _save_static_metadata(self, tx: BaseTransaction) -> None: - self.store._save_static_metadata(tx) - - def get_all_genesis(self) -> set[BaseTransaction]: - return self.store.get_all_genesis() - - def _save_transaction(self, tx: BaseTransaction, *, only_metadata: bool = False) -> None: - """Saves the transaction without modifying TimestampIndex entries (in superclass).""" - self._update_cache(tx) - self.dirty_txs.add(tx.hash) - - def _cache_popitem(self) -> BaseTransaction: - """Pop the last recently used cache item.""" - (_, removed_tx) = self.cache.popitem(last=False) - if removed_tx.hash in self.dirty_txs: - # write to disk so we don't lose the last update - self.dirty_txs.discard(removed_tx.hash) - self.store.save_transaction(removed_tx) - return removed_tx - - def _update_cache(self, tx: BaseTransaction) -> None: - """Updates the cache making sure it has at most the number of elements configured - as its capacity. - - If we need to evict a tx from cache and it's dirty, write it to disk immediately. 
- """ - _tx = self.cache.get(tx.hash, None) - if not _tx: - if len(self.cache) >= self.capacity: - self._cache_popitem() - self.cache[tx.hash] = self._clone(tx) - else: - # Tx might have been updated - self.cache[tx.hash] = self._clone(tx) - self.cache.move_to_end(tx.hash, last=True) - - def transaction_exists(self, hash_bytes: bytes) -> bool: - if hash_bytes in self.cache: - return True - return self.store.transaction_exists(hash_bytes) - - def _get_transaction(self, hash_bytes: bytes) -> BaseTransaction: - tx: Optional[BaseTransaction] - if hash_bytes in self.cache: - tx = self._clone(self.cache[hash_bytes]) - self.cache.move_to_end(hash_bytes, last=True) - self.stats['hit'] += 1 - else: - tx = self.get_transaction_from_weakref(hash_bytes) - if tx is not None: - self.stats['hit'] += 1 - else: - tx = self.store.get_transaction(hash_bytes) - tx.storage = self - self.stats['miss'] += 1 - self._update_cache(tx) - self._save_to_weakref(tx) - assert tx is not None - return tx - - def _get_all_transactions(self) -> Iterator[BaseTransaction]: - self._flush_to_storage(self.dirty_txs.copy()) - # XXX: explicitly use _get_all_transaction instead of get_all_transactions because there will already be a - # TransactionCacheStorage.get_all_transactions outer method - for tx in self.store._get_all_transactions(): - tx.storage = self - self._save_to_weakref(tx) - yield tx - - def is_empty(self) -> bool: - self._flush_to_storage(self.dirty_txs.copy()) - return self.store.is_empty() - - def add_value(self, key: str, value: str) -> None: - self.store.add_value(key, value) - - def remove_value(self, key: str) -> None: - self.store.remove_value(key) - - def get_value(self, key: str) -> Optional[str]: - return self.store.get_value(key) - - def flush(self): - self._flush_to_storage(self.dirty_txs.copy()) - - @override - def migrate_vertex_children(self) -> None: - self.store.migrate_vertex_children() diff --git a/hathor/transaction/storage/rocksdb_storage.py 
b/hathor/transaction/storage/rocksdb_storage.py index 8f9408ace..95932d2c7 100644 --- a/hathor/transaction/storage/rocksdb_storage.py +++ b/hathor/transaction/storage/rocksdb_storage.py @@ -14,17 +14,20 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Iterator, Optional +from collections import OrderedDict +from typing import TYPE_CHECKING, Any, Iterator, Optional from structlog import get_logger +from twisted.internet import threads from typing_extensions import override from hathor.indexes import IndexesManager +from hathor.reactor import ReactorProtocol from hathor.storage import RocksDBStorage from hathor.transaction.static_metadata import VertexStaticMetadata from hathor.transaction.storage.exceptions import TransactionDoesNotExist from hathor.transaction.storage.migrations import MigrationState -from hathor.transaction.storage.transaction_storage import BaseTransactionStorage +from hathor.transaction.storage.transaction_storage import BaseTransactionStorage, CacheConfig, CacheData from hathor.transaction.vertex_children import RocksDBVertexChildrenService from hathor.transaction.vertex_parser import VertexParser from hathor.types import VertexId @@ -55,14 +58,17 @@ class TransactionRocksDBStorage(BaseTransactionStorage): def __init__( self, - rocksdb_storage: RocksDBStorage, - indexes: Optional[IndexesManager] = None, *, + reactor: ReactorProtocol, + rocksdb_storage: RocksDBStorage, settings: 'HathorSettings', vertex_parser: VertexParser, nc_storage_factory: NCStorageFactory, vertex_children_service: RocksDBVertexChildrenService, + indexes: IndexesManager, + cache_config: CacheConfig | None = None, ) -> None: + self._reactor = reactor self._cf_tx = rocksdb_storage.get_or_create_column_family(_CF_NAME_TX) self._cf_meta = rocksdb_storage.get_or_create_column_family(_CF_NAME_META) self._cf_static_meta = rocksdb_storage.get_or_create_column_family(_CF_NAME_STATIC_META) @@ -72,6 +78,15 @@ def __init__( self._rocksdb_storage = 
rocksdb_storage self._db = rocksdb_storage.get_db() self.vertex_parser = vertex_parser + + cache_config = cache_config or CacheConfig(capacity=0) + self.cache_data = CacheData( + interval=cache_config.interval, + capacity=cache_config.capacity, + cache=OrderedDict(), + dirty_txs=set(), + ) + super().__init__( indexes=indexes, settings=settings, @@ -79,6 +94,79 @@ def __init__( vertex_children_service=vertex_children_service, ) + def pre_init(self) -> None: + super().pre_init() + self._reactor.callLater(self.cache_data.interval, self._start_flush_thread) + + @override + def set_cache_capacity(self, capacity: int) -> None: + assert capacity >= 0 + self.cache_data.capacity = capacity + while len(self.cache_data.cache) > capacity: + self._cache_popitem() + + def flush(self) -> None: + self._flush_to_storage(self.cache_data.dirty_txs.copy()) + + def _start_flush_thread(self) -> None: + if self.cache_data.flush_deferred is None: + deferred = threads.deferToThread(self._flush_to_storage, self.cache_data.dirty_txs.copy()) + deferred.addCallback(self._cb_flush_thread) + deferred.addErrback(self._err_flush_thread) + self.cache_data.flush_deferred = deferred + + def _cb_flush_thread(self, _res: None) -> None: + self._reactor.callLater(self.cache_data.interval, self._start_flush_thread) + self.cache_data.flush_deferred = None + + def _err_flush_thread(self, reason: Any) -> None: + self.log.error('error flushing transactions', reason=reason) + self._reactor.callLater(self.cache_data.interval, self._start_flush_thread) + self.cache_data.flush_deferred = None + + def _flush_to_storage(self, dirty_txs_copy: set[bytes]) -> None: + """Write dirty pages to disk.""" + for tx_hash in dirty_txs_copy: + # a dirty tx might be removed from self.cache outside this thread: if _update_cache is called + # and we need to save the tx to disk immediately. 
So it might happen that the tx which was + # in the dirty set when the flush thread began is not in cache anymore, hence this `if` check + if tx_hash in self.cache_data.cache: + tx = self.cache_data.cache[tx_hash] + self.cache_data.dirty_txs.discard(tx_hash) + self._save_transaction_to_db(tx) + + def _cache_popitem(self) -> None: + """Pop the last recently used cache item.""" + try: + (_, removed_tx) = self.cache_data.cache.popitem(last=False) + except KeyError: + # cache is empty + return + if removed_tx.hash in self.cache_data.dirty_txs: + # write to disk so we don't lose the last update + self.cache_data.dirty_txs.discard(removed_tx.hash) + self._save_transaction_to_db(removed_tx) + + def _update_cache(self, tx: BaseTransaction) -> None: + """Updates the cache making sure it has at most the number of elements configured + as its capacity. + + If we need to evict a tx from cache and it's dirty, write it to disk immediately. + """ + _tx = self.cache_data.cache.get(tx.hash, None) + if not _tx: + if len(self.cache_data.cache) >= self.cache_data.capacity: + self._cache_popitem() + self.cache_data.cache[tx.hash] = tx + else: + # Tx might have been updated + self.cache_data.cache[tx.hash] = tx + self.cache_data.cache.move_to_end(tx.hash, last=True) + + @override + def get_cache_data(self) -> CacheData | None: + return self.cache_data + def _load_from_bytes(self, tx_data: bytes, meta_data: bytes) -> 'BaseTransaction': from hathor.transaction.transaction_metadata import TransactionMetadata @@ -104,6 +192,8 @@ def set_migration_state(self, migration_name: str, state: MigrationState) -> Non def remove_transaction(self, tx: 'BaseTransaction') -> None: super().remove_transaction(tx) + self.cache_data.cache.pop(tx.hash, None) + self.cache_data.dirty_txs.discard(tx.hash) self._db.delete((self._cf_tx, tx.hash)) self._db.delete((self._cf_meta, tx.hash)) self._db.delete((self._cf_static_meta, tx.hash)) @@ -115,10 +205,13 @@ def save_transaction(self, tx: 'BaseTransaction', *, 
only_metadata: bool = False self._save_to_weakref(tx) def _save_transaction(self, tx: 'BaseTransaction', *, only_metadata: bool = False) -> None: + self._update_cache(tx) + self.cache_data.dirty_txs.add(tx.hash) + + def _save_transaction_to_db(self, tx: 'BaseTransaction') -> None: key = tx.hash - if not only_metadata: - tx_data = self._tx_to_bytes(tx) - self._db.put((self._cf_tx, key), tx_data) + tx_data = self._tx_to_bytes(tx) + self._db.put((self._cf_tx, key), tx_data) meta_data = tx.get_metadata(use_storage=False).to_bytes() self._db.put((self._cf_meta, key), meta_data) @@ -137,15 +230,24 @@ def _load_static_metadata(self, vertex: 'BaseTransaction') -> None: vertex.set_static_metadata(static_metadata) def transaction_exists(self, hash_bytes: bytes) -> bool: + if hash_bytes in self.cache_data.cache: + return True may_exist, _ = self._db.key_may_exist((self._cf_tx, hash_bytes)) if not may_exist: return False tx_exists = self._db.get((self._cf_tx, hash_bytes)) is not None return tx_exists - def _get_transaction(self, hash_bytes: bytes) -> 'BaseTransaction': - tx = self.get_transaction_from_weakref(hash_bytes) - if tx is not None: + def _get_transaction(self, hash_bytes: bytes) -> BaseTransaction: + if tx := self.cache_data.cache.get(hash_bytes): + self.cache_data.cache.move_to_end(hash_bytes, last=True) + self.cache_data.hit += 1 + self._save_to_weakref(tx) + return tx + + if tx := self.get_transaction_from_weakref(hash_bytes): + self.cache_data.hit += 1 + self._update_cache(tx) return tx tx = self._get_transaction_from_db(hash_bytes) @@ -156,6 +258,8 @@ def _get_transaction(self, hash_bytes: bytes) -> 'BaseTransaction': assert tx._static_metadata is not None assert tx.hash == hash_bytes + self.cache_data.miss += 1 + self._update_cache(tx) self._save_to_weakref(tx) return tx @@ -181,8 +285,7 @@ def _get_tx(self, hash_bytes: bytes, tx_data: bytes) -> 'BaseTransaction': return tx def _get_all_transactions(self) -> Iterator['BaseTransaction']: - tx: 
Optional['BaseTransaction'] - + self._flush_to_storage(self.cache_data.dirty_txs.copy()) items = self._db.iteritems(self._cf_tx) items.seek_to_first() @@ -200,6 +303,7 @@ def _get_all_transactions(self) -> Iterator['BaseTransaction']: yield tx def is_empty(self) -> bool: + self._flush_to_storage(self.cache_data.dirty_txs.copy()) # We consider 3 or less transactions as empty, because we want to ignore the genesis # block and txs keys = self._db.iterkeys(self._cf_tx) diff --git a/hathor/transaction/storage/transaction_storage.py b/hathor/transaction/storage/transaction_storage.py index a7fc0842e..1162ba89b 100644 --- a/hathor/transaction/storage/transaction_storage.py +++ b/hathor/transaction/storage/transaction_storage.py @@ -15,15 +15,17 @@ from __future__ import annotations from abc import ABC, abstractmethod, abstractproperty -from collections import deque +from collections import OrderedDict, deque from contextlib import AbstractContextManager +from dataclasses import dataclass from threading import Lock from typing import TYPE_CHECKING, Any, Iterator, Optional, cast from weakref import WeakValueDictionary from structlog import get_logger +from twisted.internet.defer import Deferred -from hathor.execution_manager import ExecutionManager +from hathor.execution_manager import ExecutionManager, non_critical_code from hathor.indexes import IndexesManager from hathor.indexes.height_index import HeightInfo from hathor.profiler import get_cpu_profiler @@ -70,11 +72,28 @@ INDEX_ATTR_PREFIX = 'index_' +@dataclass(slots=True, kw_only=True) +class CacheConfig: + interval: int = 5 + capacity: int = 10000 + + +@dataclass(slots=True, kw_only=True) +class CacheData: + interval: int + capacity: int + cache: OrderedDict[bytes, BaseTransaction] + dirty_txs: set[bytes] # txs that have been modified but are not persisted yet + flush_deferred: Deferred[None] | None = None + hit: int = 0 + miss: int = 0 + + class TransactionStorage(ABC): """Legacy sync interface, please copy 
@deprecated decorator when implementing methods.""" pubsub: Optional[PubSubManager] - indexes: Optional[IndexesManager] + indexes: IndexesManager _latest_n_height_tips: list[HeightInfo] nc_catalog: Optional['NCBlueprintCatalog'] = None @@ -109,10 +128,12 @@ def __init__( settings: HathorSettings, nc_storage_factory: NCStorageFactory, vertex_children_service: VertexChildrenService, + indexes: IndexesManager, ) -> None: self._settings = settings self._nc_storage_factory = nc_storage_factory self.vertex_children = vertex_children_service + self.indexes = indexes # Weakref is used to guarantee that there is only one instance of each transaction in memory. self._tx_weakref: WeakValueDictionary[bytes, BaseTransaction] = WeakValueDictionary() self._tx_weakref_disabled: bool = False @@ -142,8 +163,6 @@ def __init__( # Internal toggle to choose when to select topological DFS iterator, used only on some tests self._always_use_topological_dfs = False - self._saving_genesis = False - # Migrations instances self._migrations = [cls() for cls in self._migration_factories] @@ -165,11 +184,6 @@ def get_allow_scope(self) -> TxAllowScope: """Get the current allow scope.""" return self._allow_scope - @abstractmethod - def reset_indexes(self) -> None: - """Reset all the indexes, making sure that no persisted value is reused.""" - raise NotImplementedError - @abstractmethod def is_empty(self) -> bool: """True when only genesis is present, useful for checking for a fresh database.""" @@ -188,6 +202,15 @@ def get_migration_state(self, migration_name: str) -> MigrationState: def set_migration_state(self, migration_name: str, state: MigrationState) -> None: raise NotImplementedError + @abstractmethod + def get_cache_data(self) -> CacheData | None: + """Return CacheData if cache is supported and enabled, and None otherwise.""" + raise NotImplementedError + + def set_cache_capacity(self, capacity: int) -> None: + """Change the max number of items in cache, if cache is supported and 
enabled.""" + raise NotImplementedError + def _check_and_apply_migrations(self) -> None: """Check which migrations have not been run yet and apply them in order.""" from hathor.transaction.storage.exceptions import OutOfOrderMigrationError, PartialMigrationError @@ -301,7 +324,6 @@ def _checked_set_network(self, network: str) -> None: def get_best_block(self) -> Block: """The block with highest score or one of the blocks with highest scores. Can be used for mining.""" - assert self.indexes is not None block_hash = self.indexes.height.get_tip() block = self.get_transaction(block_hash) assert isinstance(block, Block) @@ -310,7 +332,6 @@ def get_best_block(self) -> Block: def _save_or_verify_genesis(self) -> None: """Save all genesis in the storage.""" - self._saving_genesis = True genesis_txs = [ self._construct_genesis_block(), self._construct_genesis_tx1(), @@ -324,10 +345,9 @@ def _save_or_verify_genesis(self) -> None: assert tx == tx2 except TransactionDoesNotExist: self.save_transaction(tx) - self.add_to_indexes(tx) + self.indexes.add_to_non_critical_indexes(tx) tx2 = tx self._genesis_cache[tx2.hash] = tx2 - self._saving_genesis = False def _save_to_weakref(self, tx: BaseTransaction) -> None: """ Save transaction to weakref. @@ -462,8 +482,9 @@ def remove_transaction(self, tx: BaseTransaction) -> None: :param tx: Transaction to be removed """ - if self.indexes is not None: - self.del_from_indexes(tx, remove_all=True, relax_assert=True) + self.indexes.del_from_critical_indexes(tx) + with non_critical_code(self.log): + self.indexes.del_from_non_critical_indexes(tx, remove_all=True) @abstractmethod def transaction_exists(self, hash_bytes: bytes) -> bool: @@ -555,7 +576,6 @@ def get_token_creation_transaction(self, hash_bytes: bytes) -> TokenCreationTran def get_block_by_height(self, height: int) -> Optional[Block]: """Return a block in the best blockchain from the height index. 
This is fast.""" - assert self.indexes is not None ancestor_hash = self.indexes.height.get(height) return None if ancestor_hash is None else self.get_block(ancestor_hash) @@ -607,12 +627,10 @@ def first_timestamp(self) -> int: raise NotImplementedError def get_best_block_hash(self) -> VertexId: - assert self.indexes is not None return VertexId(self.indexes.height.get_tip()) @abstractmethod def get_n_height_tips(self, n_blocks: int) -> list[HeightInfo]: - assert self.indexes is not None return self.indexes.height.get_n_height_tips(n_blocks) def get_weight_best_block(self) -> float: @@ -621,7 +639,6 @@ def get_weight_best_block(self) -> float: def get_height_best_block(self) -> int: """ Iterate over best block tips and get the highest height """ - assert self.indexes is not None block_info = self.indexes.height.get_height_tip() return block_info.height @@ -710,8 +727,6 @@ def topological_iterator(self) -> Iterator[BaseTransaction]: # is known to be true, but we could add a mechanism similar to what indexes use to know they're # up-to-date and get rid of that assumption so this method can be used without having to make any # assumptions - assert self.indexes is not None - if self._always_use_topological_dfs: self.log.debug('force choosing DFS iterator') return self._topological_sort_dfs() @@ -767,14 +782,6 @@ def _topological_sort_metadata(self) -> Iterator[BaseTransaction]: """ raise NotImplementedError - @abstractmethod - def add_to_indexes(self, tx: BaseTransaction) -> None: - raise NotImplementedError - - @abstractmethod - def del_from_indexes(self, tx: BaseTransaction, *, remove_all: bool = False, relax_assert: bool = False) -> None: - raise NotImplementedError - @abstractmethod def get_block_count(self) -> int: raise NotImplementedError @@ -890,7 +897,6 @@ def update_last_started_at(self, timestamp: int) -> None: Using this mehtod ensures that the same timestamp is being used and the correct indexes are being selected. 
""" - assert self.indexes is not None self.set_last_started_at(timestamp) for index in self.indexes.iter_all_indexes(): index_db_name = index.get_db_name() @@ -908,14 +914,10 @@ def flush(self) -> None: def iter_mempool_tips(self) -> Iterator[Transaction]: """Get tx tips in the mempool, using the mempool-tips index""" - assert self.indexes is not None - assert self.indexes.mempool_tips is not None yield from self.indexes.mempool_tips.iter(self) def iter_mempool(self) -> Iterator[Transaction]: """Get all transactions in the mempool, using the mempool-tips index""" - assert self.indexes is not None - assert self.indexes.mempool_tips is not None yield from self.indexes.mempool_tips.iter_all(self) def _construct_genesis_block(self) -> Block: @@ -1085,29 +1087,25 @@ def migrate_vertex_children(self) -> None: class BaseTransactionStorage(TransactionStorage): - indexes: Optional[IndexesManager] - def __init__( self, - indexes: Optional[IndexesManager] = None, pubsub: Optional[Any] = None, *, settings: HathorSettings, nc_storage_factory: NCStorageFactory, vertex_children_service: VertexChildrenService, + indexes: IndexesManager, ) -> None: super().__init__( settings=settings, nc_storage_factory=nc_storage_factory, vertex_children_service=vertex_children_service, + indexes=indexes, ) # Pubsub is used to publish tx voided and winner but it's optional self.pubsub = pubsub - # Indexes. - self.indexes = indexes - # Either save or verify all genesis. self._save_or_verify_genesis() @@ -1115,27 +1113,12 @@ def __init__( @property def latest_timestamp(self) -> int: - assert self.indexes is not None return self.indexes.info.get_latest_timestamp() @property def first_timestamp(self) -> int: - assert self.indexes is not None return self.indexes.info.get_first_timestamp() - @abstractmethod - def _save_transaction(self, tx: BaseTransaction, *, only_metadata: bool = False) -> None: - raise NotImplementedError - - def reset_indexes(self) -> None: - """Reset all indexes. 
This function should not be called unless you know what you are doing.""" - assert self.indexes is not None, 'Cannot reset indexes because they have not been enabled.' - self.indexes.force_clear_all() - - def remove_cache(self) -> None: - """Remove all caches in case we don't need it.""" - self.indexes = None - def get_n_height_tips(self, n_blocks: int) -> list[HeightInfo]: block = self.get_best_block() if self._latest_n_height_tips: @@ -1150,50 +1133,32 @@ def get_weight_best_block(self) -> float: return super().get_weight_best_block() def get_newest_blocks(self, count: int) -> tuple[list[Block], bool]: - if self.indexes is None: - raise NotImplementedError - assert self.indexes is not None block_hashes, has_more = self.indexes.sorted_blocks.get_newest(count) blocks = [cast(Block, self.get_transaction(block_hash)) for block_hash in block_hashes] return blocks, has_more def get_newest_txs(self, count: int) -> tuple[list[BaseTransaction], bool]: - if self.indexes is None: - raise NotImplementedError - assert self.indexes is not None tx_hashes, has_more = self.indexes.sorted_txs.get_newest(count) txs = [self.get_transaction(tx_hash) for tx_hash in tx_hashes] return txs, has_more def get_older_blocks_after(self, timestamp: int, hash_bytes: bytes, count: int) -> tuple[list[Block], bool]: - if self.indexes is None: - raise NotImplementedError - assert self.indexes is not None block_hashes, has_more = self.indexes.sorted_blocks.get_older(timestamp, hash_bytes, count) blocks = [cast(Block, self.get_transaction(block_hash)) for block_hash in block_hashes] return blocks, has_more def get_newer_blocks_after(self, timestamp: int, hash_bytes: bytes, count: int) -> tuple[list[BaseTransaction], bool]: - if self.indexes is None: - raise NotImplementedError - assert self.indexes is not None block_hashes, has_more = self.indexes.sorted_blocks.get_newer(timestamp, hash_bytes, count) blocks = [self.get_transaction(block_hash) for block_hash in block_hashes] return blocks, has_more 
def get_older_txs_after(self, timestamp: int, hash_bytes: bytes, count: int) -> tuple[list[BaseTransaction], bool]: - if self.indexes is None: - raise NotImplementedError - assert self.indexes is not None tx_hashes, has_more = self.indexes.sorted_txs.get_older(timestamp, hash_bytes, count) txs = [self.get_transaction(tx_hash) for tx_hash in tx_hashes] return txs, has_more def get_newer_txs_after(self, timestamp: int, hash_bytes: bytes, count: int) -> tuple[list[BaseTransaction], bool]: - if self.indexes is None: - raise NotImplementedError - assert self.indexes is not None tx_hashes, has_more = self.indexes.sorted_txs.get_newer(timestamp, hash_bytes, count) txs = [self.get_transaction(tx_hash) for tx_hash in tx_hashes] return txs, has_more @@ -1202,12 +1167,9 @@ def _manually_initialize(self) -> None: self._manually_initialize_indexes() def _manually_initialize_indexes(self) -> None: - if self.indexes is not None: - self.indexes._manually_initialize(self) + self.indexes._manually_initialize(self) def _topological_sort_timestamp_index(self) -> Iterator[BaseTransaction]: - assert self.indexes is not None - cur_timestamp: Optional[int] = None cur_blocks: list[Block] = [] cur_txs: list[Transaction] = [] @@ -1316,39 +1278,13 @@ def _run_topological_sort_dfs(self, root: BaseTransaction, visited: dict[bytes, else: stack.append(txinput) - def add_to_indexes(self, tx: BaseTransaction) -> None: - if self.indexes is None: - if self._saving_genesis: - # XXX: avoid failing on some situations where this is called before we know it's OK to skip - # see: https://github.com/HathorNetwork/hathor-core/pull/436 - return - else: - raise NotImplementedError - assert self.indexes is not None - self.indexes.add_tx(tx) - - def del_from_indexes(self, tx: BaseTransaction, *, remove_all: bool = False, relax_assert: bool = False) -> None: - if self.indexes is None: - raise NotImplementedError - assert self.indexes is not None - self.indexes.del_tx(tx, remove_all=remove_all, 
relax_assert=relax_assert) - def get_block_count(self) -> int: - if self.indexes is None: - raise NotImplementedError - assert self.indexes is not None return self.indexes.info.get_block_count() def get_tx_count(self) -> int: - if self.indexes is None: - raise NotImplementedError - assert self.indexes is not None return self.indexes.info.get_tx_count() def get_vertices_count(self) -> int: - if self.indexes is None: - raise NotImplementedError - assert self.indexes is not None return self.indexes.info.get_vertices_count() def get_genesis(self, hash_bytes: bytes) -> Optional[BaseTransaction]: diff --git a/hathor/transaction/storage/traversal.py b/hathor/transaction/storage/traversal.py index b12c81de5..1022ac4d0 100644 --- a/hathor/transaction/storage/traversal.py +++ b/hathor/transaction/storage/traversal.py @@ -17,8 +17,11 @@ import heapq from abc import ABC, abstractmethod from collections import deque +from enum import StrEnum, auto from itertools import chain -from typing import TYPE_CHECKING, Iterable, Iterator, Optional, Union +from typing import TYPE_CHECKING, Iterable, Iterator, Union + +from typing_extensions import assert_never if TYPE_CHECKING: from hathor.transaction import BaseTransaction # noqa: F401 @@ -43,6 +46,11 @@ def __le__(self, other: 'HeapItem') -> bool: return self.key <= other.key +class _WalkOp(StrEnum): + ADD_NEIGHBORS = auto() + SKIP_NEIGHBORS = auto() + + class GenericWalk(ABC): """ A helper class to walk on the DAG. 
""" @@ -72,7 +80,7 @@ def __init__( self.is_left_to_right = is_left_to_right self._reverse_heap: bool = not self.is_left_to_right - self._ignore_neighbors: Optional['BaseTransaction'] = None + self._walk_op: _WalkOp | None = None @abstractmethod def _push_visit(self, tx: 'BaseTransaction') -> None: @@ -111,7 +119,7 @@ def _get_iterator(self, tx: 'BaseTransaction', *, is_left_to_right: bool) -> Ite return it - def add_neighbors(self, tx: 'BaseTransaction') -> None: + def _add_neighbors(self, tx: 'BaseTransaction') -> None: """ Add neighbors of `tx` to be visited later according to the configuration. """ it = self._get_iterator(tx, is_left_to_right=self.is_left_to_right) @@ -121,11 +129,21 @@ def add_neighbors(self, tx: 'BaseTransaction') -> None: neighbor = self.storage.get_vertex(_hash) self._push_visit(neighbor) - def skip_neighbors(self, tx: 'BaseTransaction') -> None: - """ Mark `tx` to have its neighbors skipped, i.e., they will not be added to be - visited later. `tx` must be equal to the current yielded transaction. + def _set_walk_op(self, op: _WalkOp) -> None: + assert self._walk_op is None, 'walk op is already set' + self._walk_op = op + + def add_neighbors(self) -> None: + """ Mark current item to have its neighbors added, i.e., they will be added to be + visited later. """ - self._ignore_neighbors = tx + self._set_walk_op(_WalkOp.ADD_NEIGHBORS) + + def skip_neighbors(self) -> None: + """ Mark current item to have its neighbors skipped, i.e., they will not be added to be + visited later. 
+ """ + self._set_walk_op(_WalkOp.SKIP_NEIGHBORS) def run(self, root: Union['BaseTransaction', Iterable['BaseTransaction']], *, skip_root: bool = False) -> Iterator['BaseTransaction']: @@ -144,16 +162,21 @@ def run(self, root: Union['BaseTransaction', Iterable['BaseTransaction']], *, if not skip_root: self._push_visit(root) else: - self.add_neighbors(root) + self._add_neighbors(root) while not self._is_empty(): tx = self._pop_visit() yield tx - if not self._ignore_neighbors: - self.add_neighbors(tx) - else: - assert self._ignore_neighbors == tx - self._ignore_neighbors = None + match self._walk_op: + case None: + raise ValueError('you must explicitly add or skip neighbors') + case _WalkOp.ADD_NEIGHBORS: + self._add_neighbors(tx) + self._walk_op = None + case _WalkOp.SKIP_NEIGHBORS: + self._walk_op = None + case _: + assert_never(self._walk_op) class BFSTimestampWalk(GenericWalk): diff --git a/hathor/transaction/transaction.py b/hathor/transaction/transaction.py index 6bfdb6a30..f2336bf35 100644 --- a/hathor/transaction/transaction.py +++ b/hathor/transaction/transaction.py @@ -304,7 +304,7 @@ def to_json(self, decode_script: bool = False, include_metadata: bool = False) - nano_header = self.get_nano_header() json['nc_id'] = nano_header.get_contract_id().hex() json['nc_seqnum'] = nano_header.nc_seqnum - json['nc_blueprint_id'] = nano_header.get_blueprint_id(accept_failed_execution=True).hex() + json['nc_blueprint_id'] = nano_header.get_blueprint_id_for_json().hex() json['nc_method'] = nano_header.nc_method json['nc_args'] = nano_header.nc_args_bytes.hex() json['nc_address'] = get_address_b58_from_bytes(nano_header.nc_address) diff --git a/hathor/transaction/vertex_parser.py b/hathor/transaction/vertex_parser.py index d09e3887f..85850a18a 100644 --- a/hathor/transaction/vertex_parser.py +++ b/hathor/transaction/vertex_parser.py @@ -17,7 +17,7 @@ from struct import error as StructError from typing import TYPE_CHECKING, Type -from hathor.transaction.headers import 
NanoHeader, VertexBaseHeader, VertexHeaderId +from hathor.transaction.headers import FeeHeader, NanoHeader, VertexBaseHeader, VertexHeaderId if TYPE_CHECKING: from hathor.conf.settings import HathorSettings @@ -37,6 +37,8 @@ def get_supported_headers(settings: HathorSettings) -> dict[VertexHeaderId, Type supported_headers: dict[VertexHeaderId, Type[VertexBaseHeader]] = {} if settings.ENABLE_NANO_CONTRACTS: supported_headers[VertexHeaderId.NANO_HEADER] = NanoHeader + if settings.ENABLE_FEE_BASED_TOKENS: + supported_headers[VertexHeaderId.FEE_HEADER] = FeeHeader return supported_headers @staticmethod diff --git a/hathor/util.py b/hathor/util.py index 3197e51bb..f2c89b88f 100644 --- a/hathor/util.py +++ b/hathor/util.py @@ -392,7 +392,7 @@ def skip_until(it: Iterator[T], condition: Callable[[T], bool]) -> Iterator[T]: _DT_ITER_NEXT_WARN = 3 # time in seconds to warn when `next(iter_tx)` takes too long -_DT_LOG_PROGRESS = 30 # time in seconds after which a progress will be logged (it can take longer, but not shorter) +_DT_LOG_PROGRESS = 10 # time in seconds after which a progress will be logged (it can take longer, but not shorter) _DT_YIELD_WARN = 1 # time in seconds to warn when `yield tx` takes too long (which is when processing happens) @@ -455,18 +455,28 @@ def progress( log.info('loaded', count=count, rate=rate, total_dt=dt_total) -def tx_progress(iter_tx: Iterator['BaseTransaction'], *, log: Optional['structlog.stdlib.BoundLogger'] = None, - total: Optional[int] = None) -> Iterator['BaseTransaction']: +def tx_progress( + iter_tx: Iterator['BaseTransaction'], + *, + log: Optional['structlog.stdlib.BoundLogger'] = None, + total: Optional[int] = None, + show_height_and_ts: bool = False, +) -> Iterator['BaseTransaction']: """ Log the progress of a transaction iterator while iterating. 
""" if log is None: log = logger.new() - yield from _tx_progress(iter_tx, log=log, total=total) + yield from _tx_progress(iter_tx, log=log, total=total, show_height_and_ts=show_height_and_ts) -def _tx_progress(iter_tx: Iterator['BaseTransaction'], *, log: 'structlog.stdlib.BoundLogger', total: Optional[int] - ) -> Iterator['BaseTransaction']: +def _tx_progress( + iter_tx: Iterator['BaseTransaction'], + *, + log: 'structlog.stdlib.BoundLogger', + total: Optional[int], + show_height_and_ts: bool, +) -> Iterator['BaseTransaction']: """ Inner implementation of progress helper. """ t_start = time.time() @@ -477,6 +487,7 @@ def _tx_progress(iter_tx: Iterator['BaseTransaction'], *, log: 'structlog.stdlib count_log_prev = 0 block_count = 0 tx_count = 0 + first_log = True log.debug('load will start') t_log_prev = t_start @@ -499,12 +510,15 @@ def _tx_progress(iter_tx: Iterator['BaseTransaction'], *, log: 'structlog.stdlib t_log = time.time() dt_log = LogDuration(t_log - t_log_prev) - if dt_log > _DT_LOG_PROGRESS: + if first_log or dt_log > _DT_LOG_PROGRESS: + first_log = False t_log_prev = t_log dcount = count - count_log_prev tx_rate = '?' if dt_log == 0 else dcount / dt_log ts = datetime.datetime.fromtimestamp(ts_tx) - kwargs = dict(tx_rate=tx_rate, tx_new=dcount, dt=dt_log, total=count, latest_ts=ts, height=h) + kwargs: dict[str, Any] = dict(tx_rate=tx_rate, tx_new=dcount, dt=dt_log, total=count) + if show_height_and_ts: + kwargs.update(latest_ts=ts, height=h) if total: progress_ = count / total elapsed_time = t_log - t_start @@ -515,7 +529,6 @@ def _tx_progress(iter_tx: Iterator['BaseTransaction'], *, log: 'structlog.stdlib remaining_time = LogDuration(elapsed_time / progress_ - elapsed_time) log.info( f'loading... 
{math.floor(progress_ * 100):2.0f}%', - progress=progress_, remaining_time=remaining_time, **kwargs ) @@ -544,7 +557,10 @@ def _tx_progress(iter_tx: Iterator['BaseTransaction'], *, log: 'structlog.stdlib t_final = time.time() dt_total = LogDuration(t_final - t_start) tx_rate = '?' if dt_total == 0 else count / dt_total - log.info('loaded', tx_count=count, tx_rate=tx_rate, total_dt=dt_total, height=h, blocks=block_count, txs=tx_count) + kwargs = dict(tx_count=count, tx_rate=tx_rate, total_dt=dt_total, blocks=block_count, txs=tx_count) + if show_height_and_ts: + kwargs.update(height=h) + log.info('loaded', **kwargs) class peekable(Iterator[T]): diff --git a/hathor/utils/api.py b/hathor/utils/api.py index a074f4b58..a8e592809 100644 --- a/hathor/utils/api.py +++ b/hathor/utils/api.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -import cgi +from email.message import Message from typing import Type, TypeVar, Union from pydantic import Field, ValidationError @@ -38,8 +38,11 @@ def from_request(cls: Type[T], request: Request) -> Union[T, 'ErrorResponse']: encoding = 'utf8' if content_type_header := request.requestHeaders.getRawHeaders('content-type'): - _, options = cgi.parse_header(content_type_header[0]) - encoding = options.get('charset', encoding) + m = Message() + m['content-type'] = content_type_header[0] + encoding_raw = m.get_param('charset', encoding) + assert isinstance(encoding_raw, str) + encoding = encoding_raw raw_args = get_args(request).items() args: dict[str, str | None | list[str]] = {} diff --git a/hathor/verification/nano_header_verifier.py b/hathor/verification/nano_header_verifier.py index 0db78d7bb..6da773a3d 100644 --- a/hathor/verification/nano_header_verifier.py +++ b/hathor/verification/nano_header_verifier.py @@ -76,8 +76,12 @@ def __init__(self, *, settings: HathorSettings, tx_storage: TransactionStorage) self._settings = settings self._tx_storage = tx_storage - def 
verify_nc_signature(self, tx: BaseTransaction) -> None: + def verify_nc_signature(self, tx: BaseTransaction, params: VerificationParams) -> None: """Verify if the caller's signature is valid.""" + self._verify_nc_signature(self._settings, tx, params) + + @staticmethod + def _verify_nc_signature(settings: HathorSettings, tx: BaseTransaction, params: VerificationParams) -> None: assert tx.is_nano_contract() assert isinstance(tx, Transaction) @@ -91,7 +95,7 @@ def verify_nc_signature(self, tx: BaseTransaction) -> None: ) counter = SigopCounter( - max_multisig_pubkeys=self._settings.MAX_MULTISIG_PUBKEYS, + max_multisig_pubkeys=settings.MAX_MULTISIG_PUBKEYS, enable_checkdatasig_count=True, ) output_script = create_output_script(nano_header.nc_address) @@ -103,7 +107,7 @@ def verify_nc_signature(self, tx: BaseTransaction) -> None: raw_script_eval( input_data=nano_header.nc_script, output_script=output_script, - extras=ScriptExtras(tx=tx) + extras=ScriptExtras(tx=tx, version=params.features.opcodes_version) ) except ScriptError as e: raise NCInvalidSignature from e diff --git a/hathor/verification/token_creation_transaction_verifier.py b/hathor/verification/token_creation_transaction_verifier.py index c8b6c051d..45e56f2d5 100644 --- a/hathor/verification/token_creation_transaction_verifier.py +++ b/hathor/verification/token_creation_transaction_verifier.py @@ -48,7 +48,7 @@ def verify_token_info(self, tx: TokenCreationTransaction, params: VerificationPa # Can't create the token with NATIVE or a non-activated version version_validations = [ tx.token_version == TokenVersion.NATIVE, - tx.token_version == TokenVersion.FEE and not params.enable_nano, + tx.token_version == TokenVersion.FEE and not params.features.fee_tokens, ] if any(version_validations): diff --git a/hathor/verification/transaction_verifier.py b/hathor/verification/transaction_verifier.py index 80e0ff4ad..999b319cf 100644 --- a/hathor/verification/transaction_verifier.py +++ 
b/hathor/verification/transaction_verifier.py @@ -128,13 +128,24 @@ def verify_sigops_input(self, tx: Transaction, enable_checkdatasig_count: bool = raise TooManySigOps( 'TX[{}]: Max number of sigops for inputs exceeded ({})'.format(tx.hash_hex, n_txops)) - def verify_inputs(self, tx: Transaction, *, skip_script: bool = False) -> None: + def verify_inputs(self, tx: Transaction, params: VerificationParams, *, skip_script: bool = False) -> None: """Verify inputs signatures and ownership and all inputs actually exist""" + self._verify_inputs(self._settings, tx, params, skip_script=skip_script) + + @classmethod + def _verify_inputs( + cls, + settings: HathorSettings, + tx: Transaction, + params: VerificationParams, + *, + skip_script: bool, + ) -> None: spent_outputs: set[tuple[VertexId, int]] = set() for input_tx in tx.inputs: - if len(input_tx.data) > self._settings.MAX_INPUT_DATA_SIZE: + if len(input_tx.data) > settings.MAX_INPUT_DATA_SIZE: raise InvalidInputDataSize('size: {} and max-size: {}'.format( - len(input_tx.data), self._settings.MAX_INPUT_DATA_SIZE + len(input_tx.data), settings.MAX_INPUT_DATA_SIZE )) spent_tx = tx.get_spent_tx(input_tx) @@ -149,7 +160,7 @@ def verify_inputs(self, tx: Transaction, *, skip_script: bool = False) -> None: )) if not skip_script: - self.verify_script(tx=tx, input_tx=input_tx, spent_tx=spent_tx) + cls.verify_script(tx=tx, input_tx=input_tx, spent_tx=spent_tx, params=params) # check if any other input in this tx is spending the same output key = (input_tx.tx_id, input_tx.index) @@ -158,7 +169,14 @@ def verify_inputs(self, tx: Transaction, *, skip_script: bool = False) -> None: tx.hash_hex, input_tx.tx_id.hex(), input_tx.index)) spent_outputs.add(key) - def verify_script(self, *, tx: Transaction, input_tx: TxInput, spent_tx: BaseTransaction) -> None: + @staticmethod + def verify_script( + *, + tx: Transaction, + input_tx: TxInput, + spent_tx: BaseTransaction, + params: VerificationParams, + ) -> None: """ :type tx: Transaction 
:type input_tx: TxInput @@ -166,7 +184,7 @@ def verify_script(self, *, tx: Transaction, input_tx: TxInput, spent_tx: BaseTra """ from hathor.transaction.scripts import script_eval try: - script_eval(tx, input_tx, spent_tx) + script_eval(tx, input_tx, spent_tx, params.features.opcodes_version) except ScriptError as e: raise InvalidInputData(e) from e @@ -240,6 +258,7 @@ def verify_output_token_indexes(self, tx: Transaction) -> None: def verify_sum( cls, settings: HathorSettings, + tx: Transaction, token_dict: TokenInfoDict, allow_nonexistent_tokens: bool = False, ) -> None: @@ -264,7 +283,8 @@ def verify_sum( cls._check_token_permissions(token_uid, token_info) match token_info.version: case None: - # when a token is not found, we can't assert the HTR value, since we don't know its version + # When a token is not found, we can't assert the HTR value since we don't know the token version. + # This is only possible for nanos, because they may create the missing token in execution-time. if not allow_nonexistent_tokens: raise TokenNotFound(f'token uid {token_uid.hex()} not found.') has_nonexistent_tokens = True @@ -287,15 +307,18 @@ def verify_sum( # check whether the deposit/withdraw amount is correct htr_expected_amount = withdraw - deposit htr_info = token_dict[settings.HATHOR_TOKEN_UID] - if htr_info.amount < htr_expected_amount: - raise InputOutputMismatch('HTR balance is different than expected. (amount={}, expected={})'.format( + if htr_info.amount > htr_expected_amount: + raise InputOutputMismatch('There\'s an invalid surplus of HTR. (amount={}, expected={})'.format( htr_info.amount, htr_expected_amount, )) - # in a partial validation, it's not possible to check fees and - # htr amount since it depends on verification with all token versions if has_nonexistent_tokens: + # In a partial verification, it's not possible to check fees and + # HTR amount since it depends on knowledge of all token versions. 
+ # The skipped checks below are simply postponed to execution-time + # and run when a block confirms the nano tx. + assert tx.is_nano_contract() return expected_fee = token_dict.calculate_fee(settings) @@ -303,8 +326,8 @@ def verify_sum( raise InputOutputMismatch(f"Fee amount is different than expected. " f"(amount={token_dict.fees_from_fee_header}, expected={expected_fee})") - if htr_info.amount > htr_expected_amount: - raise InputOutputMismatch('HTR balance is different than expected. (amount={}, expected={})'.format( + if htr_info.amount < htr_expected_amount: + raise InputOutputMismatch('There\'s an invalid deficit of HTR. (amount={}, expected={})'.format( htr_info.amount, htr_expected_amount, )) @@ -333,7 +356,7 @@ def verify_version(self, tx: Transaction, params: VerificationParams) -> None: TxVersion.TOKEN_CREATION_TRANSACTION, } - if params.enable_nano: + if params.features.nanocontracts: allowed_tx_versions.add(TxVersion.ON_CHAIN_BLUEPRINT) if tx.version not in allowed_tx_versions: diff --git a/hathor/verification/verification_params.py b/hathor/verification/verification_params.py index 7c24a06f2..e677d09f2 100644 --- a/hathor/verification/verification_params.py +++ b/hathor/verification/verification_params.py @@ -16,7 +16,9 @@ from dataclasses import dataclass +from hathor.feature_activation.utils import Features from hathor.transaction import Block +from hathor.transaction.scripts.opcode import OpcodesVersion @dataclass(slots=True, frozen=True, kw_only=True) @@ -24,10 +26,9 @@ class VerificationParams: """Contains every parameter/setting to run a single verification.""" nc_block_root_id: bytes | None - enable_checkdatasig_count: bool reject_locked_reward: bool = True skip_block_weight_verification: bool = False - enable_nano: bool = False + features: Features reject_too_old_vertices: bool = False harden_token_restrictions: bool = False @@ -35,12 +36,7 @@ class VerificationParams: reject_conflicts_with_confirmed_txs: bool = False @classmethod - def 
default_for_mempool( - cls, - *, - best_block: Block, - enable_nano: bool = False, - ) -> VerificationParams: + def default_for_mempool(cls, *, best_block: Block, features: Features | None = None) -> VerificationParams: """This is the appropriate parameters for verifying mempool transactions, realtime blocks and API pushes. Other cases should instantiate `VerificationParams` manually with the appropriate parameter values. @@ -48,10 +44,18 @@ def default_for_mempool( best_block_meta = best_block.get_metadata() if best_block_meta.nc_block_root_id is None: assert best_block.is_genesis + + if features is None: + features = Features( + count_checkdatasig_op=True, + nanocontracts=True, + fee_tokens=False, + opcodes_version=OpcodesVersion.V2, + ) + return cls( nc_block_root_id=best_block_meta.nc_block_root_id, - enable_checkdatasig_count=True, - enable_nano=enable_nano, + features=features, reject_too_old_vertices=True, harden_token_restrictions=True, harden_nano_restrictions=True, diff --git a/hathor/verification/verification_service.py b/hathor/verification/verification_service.py index 5d4f196da..6f5ec9476 100644 --- a/hathor/verification/verification_service.py +++ b/hathor/verification/verification_service.py @@ -260,13 +260,14 @@ def _verify_tx( # TODO do genesis validation return self.verify_without_storage(tx, params) - self.verifiers.tx.verify_sigops_input(tx, params.enable_checkdatasig_count) - self.verifiers.tx.verify_inputs(tx) # need to run verify_inputs first to check if all inputs exist + self.verifiers.tx.verify_sigops_input(tx, params.features.count_checkdatasig_op) + self.verifiers.tx.verify_inputs(tx, params) # need to run verify_inputs first to check if all inputs exist self.verifiers.tx.verify_version(tx, params) block_storage = self._get_block_storage(params) self.verifiers.tx.verify_sum( self._settings, + tx, token_dict or tx.get_complete_token_info(block_storage), # if this tx isn't a nano contract we assume we can find all the tokens to validate 
this tx allow_nonexistent_tokens=tx.is_nano_contract() @@ -319,14 +320,14 @@ def verify_without_storage(self, vertex: BaseTransaction, params: VerificationPa if vertex.is_nano_contract(): assert self._settings.ENABLE_NANO_CONTRACTS - self._verify_without_storage_nano_header(vertex) + self._verify_without_storage_nano_header(vertex, params) def _verify_without_storage_base_block(self, block: Block, params: VerificationParams) -> None: self.verifiers.block.verify_no_inputs(block) self.verifiers.vertex.verify_outputs(block) self.verifiers.block.verify_output_token_indexes(block) self.verifiers.block.verify_data(block) - self.verifiers.vertex.verify_sigops_output(block, params.enable_checkdatasig_count) + self.verifiers.vertex.verify_sigops_output(block, params.features.count_checkdatasig_op) def _verify_without_storage_block(self, block: Block, params: VerificationParams) -> None: """ Run all verifications that do not need a storage. @@ -348,7 +349,7 @@ def _verify_without_storage_tx(self, tx: Transaction, params: VerificationParams self.verifiers.tx.verify_number_of_inputs(tx) self.verifiers.vertex.verify_outputs(tx) self.verifiers.tx.verify_output_token_indexes(tx) - self.verifiers.vertex.verify_sigops_output(tx, params.enable_checkdatasig_count) + self.verifiers.vertex.verify_sigops_output(tx, params.features.count_checkdatasig_op) self.verifiers.tx.verify_tokens(tx, params) def _verify_without_storage_token_creation_tx( @@ -358,9 +359,9 @@ def _verify_without_storage_token_creation_tx( ) -> None: self._verify_without_storage_tx(tx, params) - def _verify_without_storage_nano_header(self, tx: BaseTransaction) -> None: + def _verify_without_storage_nano_header(self, tx: BaseTransaction, params: VerificationParams) -> None: assert tx.is_nano_contract() - self.verifiers.nano_header.verify_nc_signature(tx) + self.verifiers.nano_header.verify_nc_signature(tx, params) self.verifiers.nano_header.verify_actions(tx) def _verify_without_storage_fee_header(self, tx: 
BaseTransaction) -> None: diff --git a/hathor/verification/vertex_verifier.py b/hathor/verification/vertex_verifier.py index 04d7d1a86..e8045d914 100644 --- a/hathor/verification/vertex_verifier.py +++ b/hathor/verification/vertex_verifier.py @@ -226,8 +226,9 @@ def get_allowed_headers(self, vertex: BaseTransaction, params: VerificationParam case TxVersion.ON_CHAIN_BLUEPRINT: pass case TxVersion.REGULAR_TRANSACTION | TxVersion.TOKEN_CREATION_TRANSACTION: - if params.enable_nano: + if params.features.nanocontracts: allowed_headers.add(NanoHeader) + if params.features.fee_tokens: allowed_headers.add(FeeHeader) case _: # pragma: no cover assert_never(vertex.version) diff --git a/hathor/version.py b/hathor/version.py index c368bbe56..41f6e1215 100644 --- a/hathor/version.py +++ b/hathor/version.py @@ -19,7 +19,7 @@ from structlog import get_logger -BASE_VERSION = '0.68.4' +BASE_VERSION = '0.69.0' DEFAULT_VERSION_SUFFIX = "local" BUILD_VERSION_FILE_PATH = "./BUILD_VERSION" diff --git a/hathor/version_resource.py b/hathor/version_resource.py index 8e28da181..52a606248 100644 --- a/hathor/version_resource.py +++ b/hathor/version_resource.py @@ -17,8 +17,8 @@ from hathor.api_util import Resource, set_cors from hathor.conf.get_settings import get_global_settings from hathor.feature_activation.feature_service import FeatureService +from hathor.feature_activation.utils import Features from hathor.manager import HathorManager -from hathor.nanocontracts.utils import is_nano_active from hathor.util import json_dumpb @@ -46,9 +46,10 @@ def render_GET(self, request): set_cors(request, 'GET') best_block = self.manager.tx_storage.get_best_block() - nano_contracts_enabled = is_nano_active( - settings=self._settings, block=best_block, feature_service=self.feature_service + features = Features.from_vertex( + settings=self._settings, vertex=best_block, feature_service=self.feature_service ) + nano_contracts_enabled = features.nanocontracts data = { 'version': hathor.__version__, diff 
--git a/hathor/vertex_handler/vertex_handler.py b/hathor/vertex_handler/vertex_handler.py index bd5e7cdc0..1435f1f1f 100644 --- a/hathor/vertex_handler/vertex_handler.py +++ b/hathor/vertex_handler/vertex_handler.py @@ -12,6 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import dataclasses import datetime from dataclasses import replace from typing import Any, Generator @@ -22,20 +23,20 @@ from hathor.conf.settings import HathorSettings from hathor.consensus import ConsensusAlgorithm +from hathor.consensus.consensus import ConsensusEvent from hathor.exception import HathorError, InvalidNewTransaction -from hathor.execution_manager import ExecutionManager -from hathor.feature_activation.feature import Feature +from hathor.execution_manager import ExecutionManager, non_critical_code from hathor.feature_activation.feature_service import FeatureService -from hathor.nanocontracts.utils import is_nano_active +from hathor.feature_activation.utils import Features from hathor.profiler import get_cpu_profiler from hathor.pubsub import HathorEvents, PubSubManager from hathor.reactor import ReactorProtocol from hathor.transaction import BaseTransaction, Block, Transaction +from hathor.transaction.scripts.opcode import OpcodesVersion from hathor.transaction.storage import TransactionStorage from hathor.transaction.storage.exceptions import TransactionDoesNotExist from hathor.verification.verification_params import VerificationParams from hathor.verification.verification_service import VerificationService -from hathor.wallet import BaseWallet logger = get_logger() cpu = get_cpu_profiler() @@ -52,7 +53,6 @@ class VertexHandler: '_feature_service', '_pubsub', '_execution_manager', - '_wallet', '_log_vertex_bytes', ) @@ -67,7 +67,6 @@ def __init__( feature_service: FeatureService, pubsub: PubSubManager, execution_manager: ExecutionManager, - wallet: BaseWallet | None, log_vertex_bytes: bool = False, ) -> None: 
self._log = logger.new() @@ -79,7 +78,6 @@ def __init__( self._feature_service = feature_service self._pubsub = pubsub self._execution_manager = execution_manager - self._wallet = wallet self._log_vertex_bytes = log_vertex_bytes @cpu.profiler('on_new_block') @@ -90,23 +88,17 @@ def on_new_block(self, block: Block, *, deps: list[Transaction]) -> Generator[An parent_block = self._tx_storage.get_block(parent_block_hash) parent_meta = parent_block.get_metadata() - enable_checkdatasig_count = self._feature_service.is_feature_active( - vertex=parent_block, - feature=Feature.COUNT_CHECKDATASIG_OP, - ) - - enable_nano = is_nano_active( - settings=self._settings, block=parent_block, feature_service=self._feature_service - ) - if parent_meta.nc_block_root_id is None: # This case only happens for the genesis and during sync of a voided chain. assert parent_block.is_genesis or parent_meta.voided_by params = VerificationParams( - enable_checkdatasig_count=enable_checkdatasig_count, - enable_nano=enable_nano, nc_block_root_id=parent_meta.nc_block_root_id, + features=Features.from_vertex( + settings=self._settings, + feature_service=self._feature_service, + vertex=parent_block, + ), ) for tx in deps: @@ -125,10 +117,14 @@ def on_new_block(self, block: Block, *, deps: list[Transaction]) -> Generator[An def on_new_mempool_transaction(self, tx: Transaction) -> bool: """Called by mempool sync.""" best_block = self._tx_storage.get_best_block() - enable_nano = is_nano_active(settings=self._settings, block=best_block, feature_service=self._feature_service) + features = Features.from_vertex( + settings=self._settings, + feature_service=self._feature_service, + vertex=best_block, + ) params = VerificationParams.default_for_mempool( - enable_nano=enable_nano, best_block=best_block, + features=dataclasses.replace(features, opcodes_version=OpcodesVersion.V2), ) return self._old_on_new_vertex(tx, params) @@ -143,15 +139,18 @@ def on_new_relayed_vertex( """Called for unsolicited vertex 
received, usually due to real time relay.""" best_block = self._tx_storage.get_best_block() best_block_meta = best_block.get_metadata() - enable_nano = is_nano_active(settings=self._settings, block=best_block, feature_service=self._feature_service) if best_block_meta.nc_block_root_id is None: assert best_block.is_genesis - # XXX: checkdatasig enabled for relayed vertices + + features = Features.from_vertex( + settings=self._settings, + feature_service=self._feature_service, + vertex=best_block, + ) params = VerificationParams( - enable_checkdatasig_count=True, reject_locked_reward=reject_locked_reward, - enable_nano=enable_nano, nc_block_root_id=best_block_meta.nc_block_root_id, + features=dataclasses.replace(features, opcodes_version=OpcodesVersion.V2), ) return self._old_on_new_vertex(vertex, params, quiet=quiet) @@ -174,8 +173,8 @@ def _old_on_new_vertex( return False try: - self._unsafe_save_and_run_consensus(vertex) - self._post_consensus(vertex, params, quiet=quiet) + consensus_events = self._unsafe_save_and_run_consensus(vertex) + self._post_consensus(vertex, params, consensus_events, quiet=quiet) except BaseException: self._log.error('unexpected exception in on_new_vertex()', vertex=vertex) meta = vertex.get_metadata() @@ -217,7 +216,7 @@ def _validate_vertex(self, vertex: BaseTransaction, params: VerificationParams) return True - def _unsafe_save_and_run_consensus(self, vertex: BaseTransaction) -> None: + def _unsafe_save_and_run_consensus(self, vertex: BaseTransaction) -> list[ConsensusEvent]: """ This method is considered unsafe because the caller is responsible for crashing the full node if this method throws any exception. 
@@ -228,13 +227,15 @@ def _unsafe_save_and_run_consensus(self, vertex: BaseTransaction) -> None: # then I would have a children that was not in the storage vertex.update_initial_metadata(save=False) self._tx_storage.save_transaction(vertex) - self._tx_storage.add_to_indexes(vertex) - self._consensus.unsafe_update(vertex) + with non_critical_code(self._log): + self._tx_storage.indexes.add_to_non_critical_indexes(vertex) + return self._consensus.unsafe_update(vertex) def _post_consensus( self, vertex: BaseTransaction, params: VerificationParams, + consensus_events: list[ConsensusEvent], *, quiet: bool, ) -> None: @@ -245,22 +246,22 @@ def _post_consensus( """ # XXX: during post consensus we don't need to verify weights again, so we can disable it params = replace(params, skip_block_weight_verification=True) - assert self._tx_storage.indexes is not None assert self._verification_service.validate_full( vertex, params, init_static_metadata=False, ) - self._tx_storage.indexes.update(vertex) - # Publish to pubsub manager the new tx accepted, now that it's full validated - self._pubsub.publish(HathorEvents.NETWORK_NEW_TX_ACCEPTED, tx=vertex) + self._tx_storage.indexes.update_critical_indexes(vertex) + with non_critical_code(self._log): + self._tx_storage.indexes.update_non_critical_indexes(vertex) - if self._wallet: - # TODO Remove it and use pubsub instead. - self._wallet.on_new_tx(vertex) + self._pubsub.publish(HathorEvents.NETWORK_NEW_TX_PROCESSING, tx=vertex) + for event in consensus_events: + self._pubsub.publish(event.event, **event.kwargs) + self._pubsub.publish(HathorEvents.NETWORK_NEW_TX_ACCEPTED, tx=vertex) - self._log_new_object(vertex, 'new {}', quiet=quiet) + self._log_new_object(vertex, 'new {}', quiet=quiet) def _log_new_object(self, tx: BaseTransaction, message_fmt: str, *, quiet: bool) -> None: """ A shortcut for logging additional information for block/txs. 
diff --git a/hathor/wallet/resources/thin_wallet/send_tokens.py b/hathor/wallet/resources/thin_wallet/send_tokens.py index d9860e776..0ea4c0024 100644 --- a/hathor/wallet/resources/thin_wallet/send_tokens.py +++ b/hathor/wallet/resources/thin_wallet/send_tokens.py @@ -155,10 +155,9 @@ def _render_POST_stratum(self, context: _Context) -> None: # When using stratum to solve pow, we already set timestamp and parents stratum_deferred: Deferred[None] = Deferred() - # FIXME: Skipping mypy on the line below for now, as it looks like it's wrong but we don't have tests for it. - stratum_deferred.addCallback(self._stratum_deferred_resolve, request) # type: ignore - - fn_timeout = partial(self._stratum_timeout, request=request, tx=tx) + # FIXME: Skipping mypy on the lines below for now, as it looks like it's wrong but we don't have tests for it. + stratum_deferred.addCallback(self._stratum_deferred_resolve, request) # type: ignore[call-overload] + fn_timeout = partial(self._stratum_timeout, request=request, tx=tx) # type: ignore[call-arg] stratum_deferred.addTimeout(TIMEOUT_STRATUM_RESOLVE_POW, self.manager.reactor, onTimeoutCancel=fn_timeout) # this prepares transaction for mining diff --git a/hathor/websocket/iterators.py b/hathor/websocket/iterators.py index 41f6e7298..14cf2e4c1 100644 --- a/hathor/websocket/iterators.py +++ b/hathor/websocket/iterators.py @@ -130,7 +130,6 @@ async def gap_limit_search( ) -> AddressSearch: """An async iterator that yields addresses and vertices, stopping when the gap limit is reached. 
""" - assert manager.tx_storage.indexes is not None assert manager.tx_storage.indexes.addresses is not None addresses_index = manager.tx_storage.indexes.addresses empty_addresses_counter = 0 diff --git a/hathor_cli/builder.py b/hathor_cli/builder.py index 529447b16..e0bb6de07 100644 --- a/hathor_cli/builder.py +++ b/hathor_cli/builder.py @@ -20,6 +20,7 @@ from structlog import get_logger +from hathor.transaction.storage.rocksdb_storage import CacheConfig from hathor_cli.run_node_args import RunNodeArgs from hathor_cli.side_dag import SideDagArgs from hathor.consensus import ConsensusAlgorithm @@ -86,7 +87,7 @@ def create_manager(self, reactor: Reactor) -> HathorManager: from hathor.p2p.netfilter.utils import add_peer_id_blacklist from hathor.p2p.peer_discovery import BootstrapPeerDiscovery, DNSPeerDiscovery from hathor.storage import RocksDBStorage - from hathor.transaction.storage import TransactionCacheStorage, TransactionRocksDBStorage, TransactionStorage + from hathor.transaction.storage import TransactionRocksDBStorage, TransactionStorage from hathor.util import get_environment_info settings = get_global_settings() @@ -139,18 +140,23 @@ def create_manager(self, reactor: Reactor) -> HathorManager: indexes = RocksDBIndexesManager(self.rocksdb_storage, settings=settings) vertex_children_service = RocksDBVertexChildrenService(self.rocksdb_storage) - kwargs: dict[str, Any] = {} - if self._args.disable_cache: - # We should only pass indexes if cache is disabled. Otherwise, - # only TransactionCacheStorage should have indexes. 
- kwargs['indexes'] = indexes + cache_config: CacheConfig | None = None + if not self._args.disable_cache: + cache_config = CacheConfig( + capacity=self._args.cache_size if self._args.cache_size is not None else DEFAULT_CACHE_SIZE, + ) + if self._args.cache_interval: + cache_config.interval = self._args.cache_interval + self.log.info('with cache', capacity=cache_config.capacity, interval=cache_config.interval) tx_storage = TransactionRocksDBStorage( - self.rocksdb_storage, + reactor=reactor, + rocksdb_storage=self.rocksdb_storage, settings=settings, vertex_parser=vertex_parser, nc_storage_factory=self.nc_storage_factory, vertex_children_service=vertex_children_service, - **kwargs + indexes=indexes, + cache_config=cache_config, ) event_storage = EventRocksDBStorage(self.rocksdb_storage) feature_storage = FeatureActivationStorage(settings=settings, rocksdb_storage=self.rocksdb_storage) @@ -164,20 +170,6 @@ def create_manager(self, reactor: Reactor) -> HathorManager: self.check_or_raise(self._args.cache_size is None, 'cannot use --disable-cache with --cache-size') self.check_or_raise(self._args.cache_interval is None, 'cannot use --disable-cache with --cache-interval') - if not self._args.disable_cache: - tx_storage = TransactionCacheStorage( - tx_storage, - reactor, - indexes=indexes, - settings=settings, - nc_storage_factory=self.nc_storage_factory, - vertex_children_service=vertex_children_service, - ) - tx_storage.capacity = self._args.cache_size if self._args.cache_size is not None else DEFAULT_CACHE_SIZE - if self._args.cache_interval: - tx_storage.interval = self._args.cache_interval - self.log.info('with cache', capacity=tx_storage.capacity, interval=tx_storage.interval) - self.tx_storage = tx_storage self.log.info('with indexes', indexes_class=type(tx_storage.indexes).__name__) @@ -223,15 +215,15 @@ def create_manager(self, reactor: Reactor) -> HathorManager: execution_manager=execution_manager, ) - if self._args.wallet_index and tx_storage.indexes is not 
None: + if self._args.wallet_index: self.log.debug('enable wallet indexes') self.enable_wallet_index(tx_storage.indexes, pubsub) - if self._args.utxo_index and tx_storage.indexes is not None: + if self._args.utxo_index: self.log.debug('enable utxo index') tx_storage.indexes.enable_utxo_index() - if self._args.nc_indexes and tx_storage.indexes is not None: + if self._args.nc_indexes: self.log.debug('enable nano indexes') tx_storage.indexes.enable_nc_indexes() @@ -257,13 +249,13 @@ def create_manager(self, reactor: Reactor) -> HathorManager: consensus_algorithm = ConsensusAlgorithm( self.nc_storage_factory, soft_voided_tx_ids, - pubsub=pubsub, settings=settings, runner_factory=runner_factory, nc_log_storage=nc_log_storage, nc_calls_sorter=nc_calls_sorter, feature_service=self.feature_service, nc_exec_fail_trace=self._args.nc_exec_fail_trace, + tx_storage=tx_storage, ) if self._args.x_enable_event_queue or self._args.enable_event_queue: @@ -324,7 +316,6 @@ def create_manager(self, reactor: Reactor) -> HathorManager: feature_service=self.feature_service, pubsub=pubsub, execution_manager=execution_manager, - wallet=self.wallet, log_vertex_bytes=self._args.log_vertex_bytes, ) diff --git a/hathor_cli/mining.py b/hathor_cli/mining.py index 2ee1f5ce8..2620016c6 100644 --- a/hathor_cli/mining.py +++ b/hathor_cli/mining.py @@ -140,9 +140,16 @@ def execute(args: Namespace) -> None: from hathor.verification.verification_params import VerificationParams from hathor.verification.verification_service import VerificationService from hathor.verification.vertex_verifiers import VertexVerifiers + from hathor.feature_activation.utils import Features + from hathor.transaction.scripts.opcode import OpcodesVersion settings = get_global_settings() daa = DifficultyAdjustmentAlgorithm(settings=settings) - verification_params = VerificationParams(nc_block_root_id=None, enable_checkdatasig_count=True) + verification_params = VerificationParams(nc_block_root_id=None, features=Features( + 
count_checkdatasig_op=True, + nanocontracts=False, + fee_tokens=False, + opcodes_version=OpcodesVersion.V2, + )) verifiers = VertexVerifiers.create_defaults( reactor=Mock(), settings=settings, diff --git a/hathor_tests/consensus/test_non_critical_errors.py b/hathor_tests/consensus/test_non_critical_errors.py new file mode 100644 index 000000000..47cc0e267 --- /dev/null +++ b/hathor_tests/consensus/test_non_critical_errors.py @@ -0,0 +1,103 @@ +# Copyright 2026 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from typing import Any + +import pytest +from structlog.testing import capture_logs + +from hathor.transaction import Transaction +from hathor_tests import unittest +from hathor_tests.dag_builder.builder import TestDAGBuilder + + +class TestNonCriticalErrors(unittest.TestCase): + def setUp(self) -> None: + super().setUp() + builder = self.get_builder().enable_address_index() + self.manager = self.create_peer_from_builder(builder) + self.indexes = self.manager.tx_storage.indexes + self.dag_builder = TestDAGBuilder.from_manager(self.manager) + + def test_error_on_critical_index(self) -> None: + artifacts = self.dag_builder.build_from_str(''' + blockchain genesis b[1..10] + b10 < dummy + + tx1 < tx2 < tx3 < tx4 + ''') + + tx1, tx2, tx3, tx4 = artifacts.get_typed_vertices(('tx1', 'tx2', 'tx3', 'tx4'), Transaction) + artifacts.propagate_with(self.manager, up_to='tx2') + + def update(*args: Any, **kwargs: Any) -> None: + raise Exception('test error') + + self.indexes.mempool_tips.update = update # type: ignore[method-assign] + + with pytest.raises(SystemExit), capture_logs() as log_list: + artifacts.propagate_with(self.manager) + + logs = '\n'.join(map(str, log_list)) + assert 'unexpected exception in on_new_vertex()' in logs + assert ( + 'Critical failure occurred, causing the full node to halt execution. Manual intervention is required.' 
+ ) in logs + + assert tx1.get_metadata().validation.is_fully_connected() + assert tx1.get_metadata().voided_by is None + + assert tx2.get_metadata().validation.is_fully_connected() + assert tx2.get_metadata().voided_by is None + + assert tx3.get_metadata().validation.is_fully_connected() + assert tx3.get_metadata().voided_by == {self._settings.CONSENSUS_FAIL_ID} + + assert tx4.get_metadata().validation.is_initial() + assert tx4.get_metadata().voided_by is None + + def test_error_on_non_critical_index(self) -> None: + artifacts = self.dag_builder.build_from_str(''' + blockchain genesis b[1..10] + b10 < dummy + + tx1 < tx2 < tx3 < tx4 + ''') + + tx1, tx2, tx3, tx4 = artifacts.get_typed_vertices(('tx1', 'tx2', 'tx3', 'tx4'), Transaction) + artifacts.propagate_with(self.manager, up_to='tx2') + + def add_tx(*args: Any, **kwargs: Any) -> None: + raise Exception('test error') + + assert self.indexes.addresses is not None + self.indexes.addresses.add_tx = add_tx # type: ignore[method-assign] + + with capture_logs() as log_list: + artifacts.propagate_with(self.manager) + + logs = '\n'.join(map(str, log_list)) + assert 'ignoring error in non-critical code' in logs + + assert tx1.get_metadata().validation.is_fully_connected() + assert tx1.get_metadata().voided_by is None + + assert tx2.get_metadata().validation.is_fully_connected() + assert tx2.get_metadata().voided_by is None + + assert tx3.get_metadata().validation.is_fully_connected() + assert tx3.get_metadata().voided_by is None + + assert tx4.get_metadata().validation.is_fully_connected() + assert tx4.get_metadata().voided_by is None diff --git a/hathor_tests/consensus/test_soft_voided.py b/hathor_tests/consensus/test_soft_voided.py index 0c2c042d6..4317c2203 100644 --- a/hathor_tests/consensus/test_soft_voided.py +++ b/hathor_tests/consensus/test_soft_voided.py @@ -54,6 +54,8 @@ def _run_test( yield gen_tx2 + gen_tx2.stop() + self.assertEqual(1, len(soft_voided_tx_ids)) txA_hash = list(soft_voided_tx_ids)[0] @@ -84,6 
+86,7 @@ def _run_test( assert manager2.wallet is not None address = manager2.wallet.get_unused_address(mark_as_used=False) value = 1 + simulator.run_to_completion() txC = gen_new_tx(manager2, address, value) txC.parents[0] = txA.hash txC.timestamp = max(txC.timestamp, txA.timestamp + 1) diff --git a/hathor_tests/event/test_base_event.py b/hathor_tests/event/test_base_event.py index a5311738f..6608b27ba 100644 --- a/hathor_tests/event/test_base_event.py +++ b/hathor_tests/event/test_base_event.py @@ -38,6 +38,7 @@ def test_create_base_event(event_id: int, group_id: int | None) -> None: type='VERTEX_METADATA_CHANGED', data=dict( hash='abc', + name='tx name', nonce=123, timestamp=456, signal_bits=0, diff --git a/hathor_tests/event/websocket/test_factory.py b/hathor_tests/event/websocket/test_factory.py index 03eef0ae5..4c9f33791 100644 --- a/hathor_tests/event/websocket/test_factory.py +++ b/hathor_tests/event/websocket/test_factory.py @@ -74,7 +74,8 @@ def test_broadcast_event(can_receive_event: bool) -> None: factory.broadcast_event(event) if not can_receive_event: - return connection.send_event_response.assert_not_called() + connection.send_event_response.assert_not_called() + return response = EventResponse( peer_id='my_peer_id', @@ -136,7 +137,8 @@ def test_send_next_event_to_connection(next_expected_event_id: int, can_receive_ clock.advance(0) if not can_receive_event or next_expected_event_id > n_starting_events - 1: - return connection.send_event_response.assert_not_called() + connection.send_event_response.assert_not_called() + return calls = [] for _id in range(next_expected_event_id, n_starting_events): diff --git a/hathor_tests/event/websocket/test_protocol.py b/hathor_tests/event/websocket/test_protocol.py index e7cf786f2..81da9b786 100644 --- a/hathor_tests/event/websocket/test_protocol.py +++ b/hathor_tests/event/websocket/test_protocol.py @@ -102,7 +102,7 @@ def test_send_event_response() -> None: 
b'"timestamp":456,"signal_bits":0,"version":1,"weight":10.0,"inputs":[],"outputs":[],' b'"parents":[],' b'"tokens":[],"token_name":null,"token_symbol":null,"aux_pow":null,"headers":[],' - b'"metadata":{"hash":"abc",' + b'"name":"tx name","metadata":{"hash":"abc",' b'"spent_outputs":[],"conflict_with":[],"voided_by":[],"received_by":[],' b'"twins":[],"accumulated_weight":10.0,"score":20.0,"accumulated_weight_raw":"1024",' b'"score_raw":"1048576","first_block":null,"height":100,' diff --git a/hathor_tests/feature_activation/test_feature_service.py b/hathor_tests/feature_activation/test_feature_service.py index 188e8733a..636b20b63 100644 --- a/hathor_tests/feature_activation/test_feature_service.py +++ b/hathor_tests/feature_activation/test_feature_service.py @@ -39,7 +39,7 @@ def get_storage(settings: HathorSettings, *, up_to_height: int) -> TransactionStorage: artifacts = TestBuilder(settings).build() storage = artifacts.tx_storage - indexes = not_none(artifacts.indexes) + indexes = artifacts.indexes feature_activation_bits = [ 0b0000, # 0: boundary block 0b0010, diff --git a/hathor_tests/nanocontracts/blueprints/unittest.py b/hathor_tests/nanocontracts/blueprints/unittest.py index 034c4d5b9..3e88fc525 100644 --- a/hathor_tests/nanocontracts/blueprints/unittest.py +++ b/hathor_tests/nanocontracts/blueprints/unittest.py @@ -1,4 +1,4 @@ -from io import TextIOWrapper +from io import StringIO, TextIOWrapper from typing import Sequence from hathor.crypto.util import decode_address @@ -90,7 +90,7 @@ def register_blueprint_file(self, path: str, blueprint_id: BlueprintId | None = def _register_blueprint_contents( self, - contents: TextIOWrapper, + contents: TextIOWrapper | StringIO, blueprint_id: BlueprintId | None = None, *, skip_verification: bool = False, @@ -123,7 +123,11 @@ def _register_blueprint_contents( def build_runner(self) -> TestRunner: """Create a Runner instance.""" - return TestRunner(tx_storage=self.manager.tx_storage, settings=self._settings, 
reactor=self.reactor) + return TestRunner( + tx_storage=self.manager.tx_storage, + settings=self._settings, + reactor=self.reactor, + ) def gen_random_token_uid(self) -> TokenUid: """Generate a random token UID (32 bytes).""" diff --git a/hathor_tests/nanocontracts/sorter_determinism.py b/hathor_tests/nanocontracts/sorter_determinism.py new file mode 100644 index 000000000..fdbde29a2 --- /dev/null +++ b/hathor_tests/nanocontracts/sorter_determinism.py @@ -0,0 +1,16 @@ +from hathor.nanocontracts.sorter.random_sorter import NCBlockSorter + +nc_hashes_list = [ + b'a'*32, + b'b'*32, + b'c'*32, + b'd'*32, +] + +sorter = NCBlockSorter(set(nc_hashes_list)) + +for i in range(1, len(nc_hashes_list)): + sorter.add_edge(nc_hashes_list[i], nc_hashes_list[0]) + +seed = b'x' * 32 +print(sorter.generate_random_topological_order(seed)) diff --git a/hathor_tests/nanocontracts/test_actions.py b/hathor_tests/nanocontracts/test_actions.py index 485beabe5..934c0f0ea 100644 --- a/hathor_tests/nanocontracts/test_actions.py +++ b/hathor_tests/nanocontracts/test_actions.py @@ -18,6 +18,7 @@ import pytest +from hathor.feature_activation.utils import Features from hathor.indexes.tokens_index import TokensIndex from hathor.nanocontracts import HATHOR_TOKEN_UID, NC_EXECUTION_FAIL_ID, Blueprint, Context, public from hathor.nanocontracts.catalog import NCBlueprintCatalog @@ -28,6 +29,7 @@ from hathor.transaction import Block, Transaction, TxInput, TxOutput from hathor.transaction.exceptions import InvalidToken from hathor.transaction.headers.nano_header import NanoHeaderAction +from hathor.transaction.scripts.opcode import OpcodesVersion from hathor.util import not_none from hathor.verification.nano_header_verifier import MAX_ACTIONS_LEN from hathor.verification.verification_params import VerificationParams @@ -83,7 +85,6 @@ def setUp(self) -> None: self.manager.tx_storage.nc_catalog = NCBlueprintCatalog({ self.bp_id: MyBlueprint }) - assert self.manager.tx_storage.indexes is not None 
self.tokens_index: TokensIndex = not_none(self.manager.tx_storage.indexes.tokens) self.nc_seqnum = 0 @@ -119,8 +120,13 @@ def setUp(self) -> None: ) best_block = self.manager.tx_storage.get_best_block() self.verification_params = VerificationParams.default_for_mempool( - enable_nano=True, best_block=best_block, + features=Features( + count_checkdatasig_op=False, + nanocontracts=True, + fee_tokens=False, + opcodes_version=OpcodesVersion.V1, + ) ) # We finish a manual setup of tx1, so it can be used directly in verification methods. diff --git a/hathor_tests/nanocontracts/test_blueprint_syntax.py b/hathor_tests/nanocontracts/test_blueprint_syntax.py index d8ab62090..5fc6e67d3 100644 --- a/hathor_tests/nanocontracts/test_blueprint_syntax.py +++ b/hathor_tests/nanocontracts/test_blueprint_syntax.py @@ -113,7 +113,7 @@ def test_public_missing_self(self) -> None: with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): class MyBlueprint(Blueprint): @public - def initialize() -> None: # type: ignore + def initialize() -> None: pass def test_public_wrong_self(self) -> None: @@ -129,7 +129,7 @@ def test_public_typed_self(self) -> None: with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): class MyBlueprint(Blueprint): @public - def initialize(self: int) -> None: # type: ignore + def initialize(self: int) -> None: pass def test_view_missing_self(self) -> None: @@ -141,7 +141,7 @@ def initialize(self, ctx: Context) -> None: pass @view - def nop() -> None: # type: ignore + def nop() -> None: pass def test_view_wrong_self(self) -> None: @@ -165,7 +165,7 @@ def initialize(self, ctx: Context) -> None: pass @view - def nop(self: int) -> None: # type: ignore + def nop(self: int) -> None: pass def test_fallback_missing_self(self) -> None: @@ -177,7 +177,7 @@ def initialize(self, ctx: Context) -> None: pass @fallback - def fallback() -> None: # type: ignore + def fallback() -> None: pass def test_fallback_wrong_self(self) -> None: @@ -201,7 +201,7 @@ def 
initialize(self, ctx: Context) -> None: pass @fallback - def fallback(self: int) -> None: # type: ignore + def fallback(self: int) -> None: pass def test_public_missing_context(self) -> None: diff --git a/hathor_tests/nanocontracts/test_exposed_properties.py b/hathor_tests/nanocontracts/test_exposed_properties.py index 9e4764cfd..394c984a2 100644 --- a/hathor_tests/nanocontracts/test_exposed_properties.py +++ b/hathor_tests/nanocontracts/test_exposed_properties.py @@ -118,6 +118,7 @@ 'hathor.SignedData.some_new_attribute', 'hathor.export.some_new_attribute', 'hathor.fallback.some_new_attribute', + 'hathor.json_dumps.some_new_attribute', 'hathor.public.some_new_attribute', 'hathor.sha3.some_new_attribute', 'hathor.verify_ecdsa.some_new_attribute', @@ -148,6 +149,14 @@ 'hathor.SignedData._is_protocol', ]) + +# XXX: these only appear in Python 3.13 +if version_info[1] == 13: + KNOWN_CASES.extend([ + 'hathor.NCActionType._hashable_values_', + 'hathor.NCActionType._unhashable_values_map_', + ]) + KNOWN_CASES.sort() diff --git a/hathor_tests/nanocontracts/test_nano_feature_activation.py b/hathor_tests/nanocontracts/test_feature_activations.py similarity index 64% rename from hathor_tests/nanocontracts/test_nano_feature_activation.py rename to hathor_tests/nanocontracts/test_feature_activations.py index e91e09547..49abb05f6 100644 --- a/hathor_tests/nanocontracts/test_nano_feature_activation.py +++ b/hathor_tests/nanocontracts/test_feature_activations.py @@ -14,7 +14,8 @@ import pytest -from hathor.conf.settings import NanoContractsSetting +from hathor.conf.settings import FeatureSetting +from hathor.crypto.util import decode_address, get_address_from_public_key_hash from hathor.daa import DifficultyAdjustmentAlgorithm, TestMode from hathor.exception import InvalidNewTransaction from hathor.feature_activation.feature import Feature @@ -25,6 +26,7 @@ from hathor.nanocontracts.types import BlueprintId from hathor.transaction import Block, Transaction, Vertex from 
hathor.transaction.nc_execution_state import NCExecutionState +from hathor.transaction.scripts import P2PKH, Opcode from hathor_tests import unittest from hathor_tests.dag_builder.builder import TestDAGBuilder @@ -49,7 +51,21 @@ def setUp(self) -> None: evaluation_interval=4, default_threshold=3, features={ + Feature.OPCODES_V2: Criteria( + bit=0, + start_height=4, + timeout_height=12, + signal_support_by_default=True, + version='0.0.0' + ), Feature.NANO_CONTRACTS: Criteria( + bit=1, + start_height=4, + timeout_height=12, + signal_support_by_default=True, + version='0.0.0' + ), + Feature.FEE_TOKENS: Criteria( bit=2, start_height=4, timeout_height=12, @@ -60,7 +76,9 @@ def setUp(self) -> None: ) settings = self._settings._replace( - ENABLE_NANO_CONTRACTS=NanoContractsSetting.FEATURE_ACTIVATION, + ENABLE_NANO_CONTRACTS=FeatureSetting.FEATURE_ACTIVATION, + ENABLE_FEE_BASED_TOKENS=FeatureSetting.FEATURE_ACTIVATION, + ENABLE_OPCODES_V2=FeatureSetting.FEATURE_ACTIVATION, FEATURE_ACTIVATION=feature_settings, ) daa = DifficultyAdjustmentAlgorithm(settings=self._settings, test_mode=TestMode.TEST_ALL_WEIGHT) @@ -80,7 +98,7 @@ def setUp(self) -> None: empty_block_storage.commit() self.empty_root_id = empty_block_storage.get_root_id() - def test_activation(self) -> None: + async def test_activation(self) -> None: private_key = unittest.OCB_TEST_PRIVKEY.hex() password = unittest.OCB_TEST_PASSWORD.hex() artifacts = self.dag_builder.build_from_str(f''' @@ -98,10 +116,13 @@ def test_activation(self) -> None: FBT.token_version = fee FBT.fee = 1 HTR - tx1.out[0] = 123 FBT - tx1.fee = 1 HTR + fee_tx.out[0] = 123 FBT + fee_tx.fee = 1 HTR + + op_v2_a.out[0] <<< op_v2_b + op_v2_b <-- b11 - b12 < nc1 < ocb1 < FBT < tx1 < b13 < a11 + b10 < op_v2_a < op_v2_b < b11 < b12 < nc1 < ocb1 < FBT < fee_tx < b13 < a11 nc1 <-- b13 ocb1 <-- b13 @@ -116,13 +137,40 @@ def test_activation(self) -> None: ('b3', 'b4', 'b7', 'b8', 'b11', 'b12', 'b13', 'a11', 'a12', 'a13'), Block, ) - nc1, ocb1, fbt, tx1 = 
artifacts.get_typed_vertices(('nc1', 'ocb1', 'FBT', 'tx1'), Transaction) + nc1, ocb1, fbt, fee_tx, op_v2_a, op_v2_b = artifacts.get_typed_vertices( + ('nc1', 'ocb1', 'FBT', 'fee_tx', 'op_v2_a', 'op_v2_b'), + Transaction, + ) + + # Setup txs for testing OPCODES_V2. + assert len(op_v2_b.outputs) == 1 + op_v2_b_out = op_v2_b.outputs[0] + p2pkh = P2PKH.parse_script(op_v2_b_out.script) + assert p2pkh is not None + op_v2_address = decode_address(p2pkh.address) + + # This is a custom script that uses one of the deprecated opcodes and will end with 1 on the stack. + assert len(op_v2_b.inputs) == 1 + op_v2_b_in = op_v2_b.inputs[0] + op_v2_b_in.data = bytes([ + 0x19, + *get_address_from_public_key_hash(op_v2_address[1:-4]), + Opcode.OP_FIND_P2PKH, + ]) + + assert op_v2_b_in.tx_id == op_v2_a.hash + op_v2_a_out = op_v2_a.outputs[op_v2_b_in.index] + op_v2_a_out.script = b'' # Empty script so op_v2_b can spend it with the custom script. artifacts.propagate_with(self.manager, up_to='b3') assert self.feature_service.get_state(block=b3, feature=Feature.NANO_CONTRACTS) == FeatureState.DEFINED + assert self.feature_service.get_state(block=b3, feature=Feature.FEE_TOKENS) == FeatureState.DEFINED + assert self.feature_service.get_state(block=b3, feature=Feature.OPCODES_V2) == FeatureState.DEFINED artifacts.propagate_with(self.manager, up_to='b4') assert self.feature_service.get_state(block=b4, feature=Feature.NANO_CONTRACTS) == FeatureState.STARTED + assert self.feature_service.get_state(block=b4, feature=Feature.FEE_TOKENS) == FeatureState.STARTED + assert self.feature_service.get_state(block=b4, feature=Feature.OPCODES_V2) == FeatureState.STARTED signaling_blocks = ('b5', 'b6', 'b7') for block_name in signaling_blocks: @@ -133,28 +181,52 @@ def test_activation(self) -> None: artifacts.propagate_with(self.manager, up_to=block_name) assert self.feature_service.get_state(block=b7, feature=Feature.NANO_CONTRACTS) == FeatureState.STARTED + assert self.feature_service.get_state(block=b7, 
feature=Feature.FEE_TOKENS) == FeatureState.STARTED + assert self.feature_service.get_state(block=b7, feature=Feature.OPCODES_V2) == FeatureState.STARTED artifacts.propagate_with(self.manager, up_to='b8') assert self.feature_service.get_state(block=b8, feature=Feature.NANO_CONTRACTS) == FeatureState.LOCKED_IN + assert self.feature_service.get_state(block=b8, feature=Feature.FEE_TOKENS) == FeatureState.LOCKED_IN + assert self.feature_service.get_state(block=b8, feature=Feature.OPCODES_V2) == FeatureState.LOCKED_IN + + artifacts.propagate_with(self.manager, up_to='op_v2_a') + + # At this point the OPCODES_V2 feature is not active, + # but deprecated opcodes are already rejected on the mempool + msg = 'full validation failed: unknown opcode: 208' + with pytest.raises(InvalidNewTransaction, match=msg): + self.vertex_handler.on_new_relayed_vertex(op_v2_b) + assert op_v2_b.get_metadata().validation.is_initial() + assert op_v2_b.get_metadata().voided_by is None + + # However, deprecated opcodes would be accepted if relayed inside a block. + # We have to manually propagate it. + d = self.vertex_handler.on_new_block(b11, deps=[op_v2_b]) + self.clock.advance(1) + assert d.called and d.result is True + artifacts._last_propagated = 'b11' - artifacts.propagate_with(self.manager, up_to='b11') assert self.feature_service.get_state(block=b11, feature=Feature.NANO_CONTRACTS) == FeatureState.LOCKED_IN + assert self.feature_service.get_state(block=b11, feature=Feature.FEE_TOKENS) == FeatureState.LOCKED_IN + assert self.feature_service.get_state(block=b11, feature=Feature.OPCODES_V2) == FeatureState.LOCKED_IN assert b11.get_metadata().nc_block_root_id == self.empty_root_id - # At this point, the feature is not active, so the nc and fee txs are rejected on the mempool. 
+ # At this point the nano feature is not active, so nano header is rejected on the mempool msg = 'full validation failed: Header `NanoHeader` not supported by `Transaction`' with pytest.raises(InvalidNewTransaction, match=msg): self.vertex_handler.on_new_relayed_vertex(nc1) assert nc1.get_metadata().validation.is_initial() assert nc1.get_metadata().voided_by is None + # At this point the nano feature is not active, so OCB is rejected on the mempool msg = 'full validation failed: invalid vertex version: 6' with pytest.raises(InvalidNewTransaction, match=msg): self.vertex_handler.on_new_relayed_vertex(ocb1) assert ocb1.get_metadata().validation.is_initial() assert ocb1.get_metadata().voided_by is None + # At this point the fee feature is not active, so fee header is rejected on the mempool msg = 'full validation failed: Header `FeeHeader` not supported by `TokenCreationTransaction`' with pytest.raises(InvalidNewTransaction, match=msg): self.vertex_handler.on_new_relayed_vertex(fbt) @@ -163,6 +235,8 @@ def test_activation(self) -> None: artifacts.propagate_with(self.manager, up_to='b12') assert self.feature_service.get_state(block=b12, feature=Feature.NANO_CONTRACTS) == FeatureState.ACTIVE + assert self.feature_service.get_state(block=b12, feature=Feature.FEE_TOKENS) == FeatureState.ACTIVE + assert self.feature_service.get_state(block=b12, feature=Feature.OPCODES_V2) == FeatureState.ACTIVE assert b11.get_metadata().nc_block_root_id == self.empty_root_id assert b12.get_metadata().nc_block_root_id == self.empty_root_id @@ -180,9 +254,9 @@ def test_activation(self) -> None: assert fbt.get_metadata().validation.is_valid() assert fbt.get_metadata().voided_by is None - artifacts.propagate_with(self.manager, up_to='tx1') - assert tx1.get_metadata().validation.is_valid() - assert tx1.get_metadata().voided_by is None + artifacts.propagate_with(self.manager, up_to='fee_tx') + assert fee_tx.get_metadata().validation.is_valid() + assert fee_tx.get_metadata().voided_by is None 
artifacts.propagate_with(self.manager, up_to='b13') assert nc1.get_metadata().nc_execution == NCExecutionState.SUCCESS @@ -191,36 +265,44 @@ def test_activation(self) -> None: assert b12.get_metadata().nc_block_root_id == self.empty_root_id assert b13.get_metadata().nc_block_root_id not in (self.empty_root_id, None) + # A reorg happens, decreasing the best chain. artifacts.propagate_with(self.manager, up_to='a11') assert a11.get_metadata().validation.is_valid() assert a11.get_metadata().voided_by is None - assert b11.get_metadata().voided_by == {b11.hash} - assert b12.get_metadata().voided_by == {b12.hash} + assert b11.get_metadata().validation.is_invalid() + assert b12.get_metadata().validation.is_invalid() assert b13.get_metadata().validation.is_invalid() assert nc1.get_metadata().validation.is_invalid() assert ocb1.get_metadata().validation.is_invalid() assert fbt.get_metadata().validation.is_invalid() - assert tx1.get_metadata().validation.is_invalid() + assert fee_tx.get_metadata().validation.is_invalid() + assert op_v2_b.get_metadata().validation.is_invalid() assert b11.get_metadata().nc_block_root_id == self.empty_root_id assert b12.get_metadata().nc_block_root_id == self.empty_root_id assert b13.get_metadata().nc_block_root_id not in (self.empty_root_id, None) assert a11.get_metadata().nc_block_root_id == self.empty_root_id - # The nc and fee txs are removed from the mempool. + # The nc, fee, and deprecated opcodes txs are removed from the mempool. 
+ assert not self.manager.tx_storage.transaction_exists(b11.hash) + assert not self.manager.tx_storage.transaction_exists(b12.hash) assert not self.manager.tx_storage.transaction_exists(b13.hash) assert not self.manager.tx_storage.transaction_exists(nc1.hash) assert not self.manager.tx_storage.transaction_exists(ocb1.hash) assert not self.manager.tx_storage.transaction_exists(fbt.hash) - assert not self.manager.tx_storage.transaction_exists(tx1.hash) + assert not self.manager.tx_storage.transaction_exists(fee_tx.hash) + assert not self.manager.tx_storage.transaction_exists(op_v2_b.hash) assert nc1 not in list(self.manager.tx_storage.iter_mempool_tips()) assert ocb1 not in list(self.manager.tx_storage.iter_mempool_tips()) assert fbt not in list(self.manager.tx_storage.iter_mempool_tips()) - assert tx1 not in list(self.manager.tx_storage.iter_mempool_tips()) + assert fee_tx not in list(self.manager.tx_storage.iter_mempool_tips()) + assert op_v2_b not in list(self.manager.tx_storage.iter_mempool_tips()) - # The nc and fee txs are re-accepted on the mempool. + # The feature states re-activate. artifacts.propagate_with(self.manager, up_to='a12') assert self.feature_service.get_state(block=a12, feature=Feature.NANO_CONTRACTS) == FeatureState.ACTIVE + assert self.feature_service.get_state(block=a12, feature=Feature.FEE_TOKENS) == FeatureState.ACTIVE + assert self.feature_service.get_state(block=a12, feature=Feature.OPCODES_V2) == FeatureState.ACTIVE assert b11.get_metadata().nc_block_root_id == self.empty_root_id assert b12.get_metadata().nc_block_root_id == self.empty_root_id @@ -228,6 +310,7 @@ def test_activation(self) -> None: assert a11.get_metadata().nc_block_root_id == self.empty_root_id assert a12.get_metadata().nc_block_root_id == self.empty_root_id + # The nc and fee txs are re-accepted on the mempool. 
self._reset_vertex(nc1) self.vertex_handler.on_new_relayed_vertex(nc1) assert nc1.get_metadata().validation.is_valid() @@ -249,12 +332,12 @@ def test_activation(self) -> None: assert self.manager.tx_storage.transaction_exists(fbt.hash) assert fbt in list(self.manager.tx_storage.iter_mempool_tips()) - self._reset_vertex(tx1) - self.vertex_handler.on_new_relayed_vertex(tx1) - assert tx1.get_metadata().validation.is_valid() - assert tx1.get_metadata().voided_by is None - assert self.manager.tx_storage.transaction_exists(tx1.hash) - assert tx1 in list(self.manager.tx_storage.iter_mempool_tips()) + self._reset_vertex(fee_tx) + self.vertex_handler.on_new_relayed_vertex(fee_tx) + assert fee_tx.get_metadata().validation.is_valid() + assert fee_tx.get_metadata().voided_by is None + assert self.manager.tx_storage.transaction_exists(fee_tx.hash) + assert fee_tx in list(self.manager.tx_storage.iter_mempool_tips()) artifacts.propagate_with(self.manager, up_to='a13') diff --git a/hathor_tests/nanocontracts/test_fee_tokens.py b/hathor_tests/nanocontracts/test_fee_tokens.py index b3ac950b7..9d470bc91 100644 --- a/hathor_tests/nanocontracts/test_fee_tokens.py +++ b/hathor_tests/nanocontracts/test_fee_tokens.py @@ -201,7 +201,7 @@ def test_postponed_verification_fail_with_dbt(self) -> None: reason='InputOutputMismatch: Fee amount is different than expected. (amount=1, expected=0)', ) - def test_postponed_verification_fail_less_htr_balance(self) -> None: + def test_postponed_verification_fail_melt_htr(self) -> None: artifacts = self.dag_builder.build_from_str(f''' blockchain genesis b[1..12] b10 < dummy @@ -241,16 +241,19 @@ def test_postponed_verification_fail_less_htr_balance(self) -> None: assert tx1.get_metadata().nc_execution == NCExecutionState.SUCCESS assert tx1.get_metadata().voided_by is None - # Verification of minting HTR is not postponed, so it fails in verification-time. 
- with pytest.raises(Exception) as e: - artifacts.propagate_with(self.manager, up_to='tx2') + artifacts.propagate_with(self.manager, up_to='b12') + assert tx2.get_metadata().first_block == b12.hash + assert tx2.get_metadata().nc_execution == NCExecutionState.FAILURE + assert tx2.get_metadata().voided_by == {NC_EXECUTION_FAIL_ID, tx2.hash} - assert isinstance(e.value.__cause__, InvalidNewTransaction) - assert e.value.__cause__.args[0] == ( - 'full validation failed: HTR balance is different than expected. (amount=-1000, expected=0)' + assert_nc_failure_reason( + manager=self.manager, + tx_id=tx2.hash, + block_id=b12.hash, + reason='InputOutputMismatch: There\'s an invalid deficit of HTR. (amount=-1000, expected=0)', ) - def test_postponed_verification_fail_more_htr_balance(self) -> None: + def test_postponed_verification_fail_mint_htr(self) -> None: artifacts = self.dag_builder.build_from_str(f''' blockchain genesis b[1..12] b10 < dummy @@ -263,7 +266,7 @@ def test_postponed_verification_fail_more_htr_balance(self) -> None: tx2.nc_method = create_fee_token() tx2.fee = 1 HTR - tx1 < tx2 + tx1 < b11 < tx2 tx1 <-- b11 tx2 <-- b12 ''') @@ -288,16 +291,13 @@ def test_postponed_verification_fail_more_htr_balance(self) -> None: assert tx1.get_metadata().nc_execution == NCExecutionState.SUCCESS assert tx1.get_metadata().voided_by is None - artifacts.propagate_with(self.manager, up_to='b12') - assert tx2.get_metadata().first_block == b12.hash - assert tx2.get_metadata().nc_execution == NCExecutionState.FAILURE - assert tx2.get_metadata().voided_by == {NC_EXECUTION_FAIL_ID, tx2.hash} + # Verification of minting HTR is not postponed, so it fails in verification-time. + with pytest.raises(Exception) as e: + artifacts.propagate_with(self.manager, up_to='tx2') - assert_nc_failure_reason( - manager=self.manager, - tx_id=tx2.hash, - block_id=b12.hash, - reason='InputOutputMismatch: HTR balance is different than expected. 
(amount=1000, expected=0)', + assert isinstance(e.value.__cause__, InvalidNewTransaction) + assert e.value.__cause__.args[0] == ( + 'full validation failed: There\'s an invalid surplus of HTR. (amount=1000, expected=0)' ) def test_postponed_verification_pay_fee_with_fbt(self) -> None: diff --git a/hathor_tests/nanocontracts/test_indexes2.py b/hathor_tests/nanocontracts/test_indexes2.py index f12aeaaac..c865fa10d 100644 --- a/hathor_tests/nanocontracts/test_indexes2.py +++ b/hathor_tests/nanocontracts/test_indexes2.py @@ -31,8 +31,6 @@ def initialize(self, ctx: Context, amount: int) -> None: class TestIndexes2(BlueprintTestCase): def setUp(self) -> None: super().setUp() - - assert self.manager.tx_storage.indexes is not None assert self.manager.tx_storage.indexes.tokens is not None self.tokens_index = self.manager.tx_storage.indexes.tokens diff --git a/hathor_tests/nanocontracts/test_nanocontract.py b/hathor_tests/nanocontracts/test_nanocontract.py index 2ca8c3271..aab9d722e 100644 --- a/hathor_tests/nanocontracts/test_nanocontract.py +++ b/hathor_tests/nanocontracts/test_nanocontract.py @@ -1,4 +1,5 @@ from typing import Any +from unittest.mock import Mock import pytest from cryptography.hazmat.primitives import hashes @@ -39,6 +40,7 @@ from hathor.transaction.scripts import P2PKH, HathorScript, Opcode from hathor.transaction.validation_state import ValidationState from hathor.verification.nano_header_verifier import MAX_NC_SCRIPT_SIGOPS_COUNT, MAX_NC_SCRIPT_SIZE +from hathor.verification.verification_params import VerificationParams from hathor.wallet import KeyPair from hathor_tests import unittest @@ -84,6 +86,8 @@ def setUp(self) -> None: self.genesis = self.peer.tx_storage.get_all_genesis() self.genesis_txs = [tx for tx in self.genesis if not tx.is_block] + self.verification_params = VerificationParams.default_for_mempool(best_block=Mock()) + def _create_nc( self, nc_id: VertexId, @@ -172,7 +176,7 @@ def test_serialization_skip_signature(self) -> None: def 
test_verify_signature_success(self) -> None: nc = self._get_nc() nc.clear_sighash_cache() - self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc) + self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc, self.verification_params) def test_verify_signature_fails_nc_id(self) -> None: nc = self._get_nc() @@ -180,7 +184,7 @@ def test_verify_signature_fails_nc_id(self) -> None: nano_header.nc_id = b'a' * 32 nc.clear_sighash_cache() with self.assertRaises(NCInvalidSignature): - self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc) + self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc, self.verification_params) def test_verify_signature_fails_nc_method(self) -> None: nc = self._get_nc() @@ -188,7 +192,7 @@ def test_verify_signature_fails_nc_method(self) -> None: nano_header.nc_method = 'other_nc_method' nc.clear_sighash_cache() with self.assertRaises(NCInvalidSignature): - self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc) + self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc, self.verification_params) def test_verify_signature_fails_nc_args_bytes(self) -> None: nc = self._get_nc() @@ -196,7 +200,7 @@ def test_verify_signature_fails_nc_args_bytes(self) -> None: nano_header.nc_args_bytes = b'other_nc_args_bytes' nc.clear_sighash_cache() with self.assertRaises(NCInvalidSignature): - self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc) + self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc, self.verification_params) def test_verify_signature_fails_invalid_nc_address(self) -> None: nc = self._get_nc() @@ -204,7 +208,7 @@ def test_verify_signature_fails_invalid_nc_address(self) -> None: nano_header.nc_address = b'invalid-address' nc.clear_sighash_cache() with pytest.raises(NCInvalidSignature, match=f'invalid address: {nano_header.nc_address.hex()}'): - 
self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc) + self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc, self.verification_params) def test_verify_signature_fails_invalid_nc_script(self) -> None: nc = self._get_nc() @@ -212,7 +216,7 @@ def test_verify_signature_fails_invalid_nc_script(self) -> None: nano_header.nc_script = b'invalid-script' nc.clear_sighash_cache() with pytest.raises(InvalidScriptError, match='Invalid Opcode'): - self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc) + self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc, self.verification_params) def test_verify_signature_fails_wrong_nc_address(self) -> None: key = KeyPair.create(b'xyz') @@ -225,7 +229,7 @@ def test_verify_signature_fails_wrong_nc_address(self) -> None: nano_header.nc_address = get_address_from_public_key_bytes(pubkey_bytes) nc.clear_sighash_cache() with pytest.raises(NCInvalidSignature) as e: - self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc) + self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc, self.verification_params) assert isinstance(e.value.__cause__, EqualVerifyFailed) def test_verify_signature_fails_wrong_pubkey(self) -> None: @@ -244,7 +248,7 @@ def test_verify_signature_fails_wrong_pubkey(self) -> None: nano_header.nc_script = P2PKH.create_input_data(public_key_bytes=pubkey_bytes, signature=signature) # First, it's passing with the key from above - self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc) + self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc, self.verification_params) # We change the script to use a new pubkey, but with the same signature key = KeyPair.create(b'wrong') @@ -254,7 +258,7 @@ def test_verify_signature_fails_wrong_pubkey(self) -> None: nano_header.nc_script = P2PKH.create_input_data(public_key_bytes=pubkey_bytes, signature=signature) with 
pytest.raises(NCInvalidSignature) as e: - self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc) + self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc, self.verification_params) assert isinstance(e.value.__cause__, EqualVerifyFailed) def test_verify_signature_fails_wrong_signature(self) -> None: @@ -273,7 +277,7 @@ def test_verify_signature_fails_wrong_signature(self) -> None: nano_header.nc_script = P2PKH.create_input_data(public_key_bytes=pubkey_bytes, signature=signature) # First, it's passing with the key from above - self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc) + self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc, self.verification_params) # We change the script to use a new signature, but with the same pubkey key = KeyPair.create(b'wrong') @@ -282,7 +286,7 @@ def test_verify_signature_fails_wrong_signature(self) -> None: nano_header.nc_script = P2PKH.create_input_data(public_key_bytes=pubkey_bytes, signature=signature) with pytest.raises(NCInvalidSignature) as e: - self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc) + self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc, self.verification_params) assert isinstance(e.value.__cause__, FinalStackInvalid) assert 'Stack left with False value' in e.value.__cause__.args[0] @@ -292,7 +296,7 @@ def test_verify_signature_fails_nc_script_too_large(self) -> None: nano_header.nc_script = b'\x00' * (MAX_NC_SCRIPT_SIZE + 1) with pytest.raises(NCInvalidSignature, match='nc_script larger than max: 1025 > 1024'): - self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc) + self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc, self.verification_params) def test_verify_signature_fails_nc_script_too_many_sigops(self) -> None: nc = self._get_nc() @@ -305,7 +309,7 @@ def test_verify_signature_fails_nc_script_too_many_sigops(self) -> None: 
nano_header.nc_script = script.data with pytest.raises(TooManySigOps, match='sigops count greater than max: 21 > 20'): - self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc) + self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc, self.verification_params) def test_verify_signature_multisig(self) -> None: nc = self._get_nc() @@ -332,7 +336,7 @@ def test_verify_signature_multisig(self) -> None: sign_privkeys=[keys[0][0]], ) with pytest.raises(NCInvalidSignature) as e: - self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc) + self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc, self.verification_params) assert isinstance(e.value.__cause__, MissingStackItems) assert e.value.__cause__.args[0] == 'OP_CHECKMULTISIG: not enough signatures on the stack' @@ -345,7 +349,7 @@ def test_verify_signature_multisig(self) -> None: sign_privkeys=[KeyPair.create(b'invalid').get_private_key(b'invalid')], ) with pytest.raises(NCInvalidSignature) as e: - self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc) + self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc, self.verification_params) assert isinstance(e.value.__cause__, FinalStackInvalid) assert 'Stack left with False value' in e.value.__cause__.args[0] @@ -357,13 +361,13 @@ def test_verify_signature_multisig(self) -> None: redeem_pubkey_bytes=redeem_pubkey_bytes, sign_privkeys=[x[0] for x in keys[:2]], ) - self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc) + self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc, self.verification_params) # Test fails because the address was changed nc.clear_sighash_cache() nano_header.nc_address = decode_address(self.peer.wallet.get_unused_address()) with pytest.raises(NCInvalidSignature) as e: - self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc) + 
self.peer.verification_service.verifiers.nano_header.verify_nc_signature(nc, self.verification_params) assert isinstance(e.value.__cause__, EqualVerifyFailed) def test_get_related_addresses(self) -> None: diff --git a/hathor_tests/nanocontracts/test_sorter_determinism.py b/hathor_tests/nanocontracts/test_sorter_determinism.py new file mode 100644 index 000000000..980ce26d2 --- /dev/null +++ b/hathor_tests/nanocontracts/test_sorter_determinism.py @@ -0,0 +1,34 @@ +import os +import subprocess +import sys +from pathlib import Path + + +def run_in_subprocess(pythonhashseed: str) -> str: + env = os.environ.copy() + env["PYTHONHASHSEED"] = pythonhashseed + + # Add project root to PYTHONPATH so subprocess can import hathor + current_dir = Path(__file__).parent + project_root = current_dir.parent.parent # Go up 2 levels to hathor-core root + env["PYTHONPATH"] = str(project_root) + os.pathsep + env.get("PYTHONPATH", "") + + script_path = current_dir / 'sorter_determinism.py' + + proc = subprocess.run( + [sys.executable, script_path], + env=env, + capture_output=True, + text=True, + check=True, + ) + return proc.stdout.strip() + + +def test_algorithm_is_deterministic_across_pythonhashseed(): + results = set() + for hseed in range(20): + print('Running...', hseed) + out = run_in_subprocess(str(hseed)) + results.add(out) + assert len(results) == 1 diff --git a/hathor_tests/nanocontracts/test_sorter_determinism2.py b/hathor_tests/nanocontracts/test_sorter_determinism2.py new file mode 100644 index 000000000..d1fac0065 --- /dev/null +++ b/hathor_tests/nanocontracts/test_sorter_determinism2.py @@ -0,0 +1,144 @@ +from hathor.nanocontracts.sorter.random_sorter import NCBlockSorter, SorterNode + + +def test_random_sorter_stable_order() -> None: + seed = bytes.fromhex('0ccf87ef1e7307c3017413ce2477df54ed31d396792f0bfbef93aa7741949f2b') + + nc_hashes = set(bytes.fromhex(i) for i in [ + '0000142cf4351face7ff5803117f6d4c0375b0b724c576f7ffcbea7058fa9470', + 
'00000fb45c8eeecbe2bc5ab69f8a1f88081a7739c813b7accefbf4a13ac5e37a', + '00004151b4a5eed517d225da4be498ec29c3f61ecf1b72766a16ab952610af1b', + '000049ba9ba45cf8dccaed7d05b8a383ca392b9329866531da9c45960e699f26', + '00000e468cc227afe3999df597c49fa37ba527c2a6e2cdf1b9cfe3df67835cab', + '00000060e9e2358566ad277e7750a016d09043ab53cc4ce7897e29631f5ad7ea', + ]) + + db = [ + SorterNode( + id=bytes.fromhex('000049ba9ba45cf8dccaed7d05b8a383ca392b9329866531da9c45960e699f26'), + outgoing_edges={ + bytes.fromhex('000003e0baf17eee5a25aa0ccf36eb331a05818c87bc1c316f54485aa974c485'), + b'dummy:2', + }, + incoming_edges=set(), + ), + SorterNode( + id=bytes.fromhex('000003e0baf17eee5a25aa0ccf36eb331a05818c87bc1c316f54485aa974c485'), + outgoing_edges=set(), + incoming_edges={ + bytes.fromhex('0000142cf4351face7ff5803117f6d4c0375b0b724c576f7ffcbea7058fa9470'), + bytes.fromhex('00000fb45c8eeecbe2bc5ab69f8a1f88081a7739c813b7accefbf4a13ac5e37a'), + bytes.fromhex('00004151b4a5eed517d225da4be498ec29c3f61ecf1b72766a16ab952610af1b'), + bytes.fromhex('000049ba9ba45cf8dccaed7d05b8a383ca392b9329866531da9c45960e699f26'), + bytes.fromhex('00000e468cc227afe3999df597c49fa37ba527c2a6e2cdf1b9cfe3df67835cab'), + bytes.fromhex('00000060e9e2358566ad277e7750a016d09043ab53cc4ce7897e29631f5ad7ea'), + }, + ), + SorterNode( + id=bytes.fromhex('0000142cf4351face7ff5803117f6d4c0375b0b724c576f7ffcbea7058fa9470'), + outgoing_edges={ + bytes.fromhex('000003e0baf17eee5a25aa0ccf36eb331a05818c87bc1c316f54485aa974c485'), + b'dummy:2', + }, + incoming_edges=set() + ), + SorterNode( + id=bytes.fromhex('00004151b4a5eed517d225da4be498ec29c3f61ecf1b72766a16ab952610af1b'), + outgoing_edges={ + bytes.fromhex('000003e0baf17eee5a25aa0ccf36eb331a05818c87bc1c316f54485aa974c485'), + b'dummy:2', + }, + incoming_edges=set(), + ), + SorterNode( + id=bytes.fromhex('00000fb45c8eeecbe2bc5ab69f8a1f88081a7739c813b7accefbf4a13ac5e37a'), + outgoing_edges={ + b'dummy:1', + 
bytes.fromhex('000003e0baf17eee5a25aa0ccf36eb331a05818c87bc1c316f54485aa974c485'), + bytes.fromhex('00000060e9e2358566ad277e7750a016d09043ab53cc4ce7897e29631f5ad7ea'), + }, + incoming_edges={ + b'dummy:2', + }, + ), + SorterNode( + id=bytes.fromhex('00000060e9e2358566ad277e7750a016d09043ab53cc4ce7897e29631f5ad7ea'), + outgoing_edges={ + bytes.fromhex('00000e468cc227afe3999df597c49fa37ba527c2a6e2cdf1b9cfe3df67835cab'), + b'dummy:0', + bytes.fromhex('000003e0baf17eee5a25aa0ccf36eb331a05818c87bc1c316f54485aa974c485'), + }, + incoming_edges={ + b'dummy:1', + bytes.fromhex('00000fb45c8eeecbe2bc5ab69f8a1f88081a7739c813b7accefbf4a13ac5e37a'), + }, + ), + SorterNode( + id=bytes.fromhex('00000e468cc227afe3999df597c49fa37ba527c2a6e2cdf1b9cfe3df67835cab'), + outgoing_edges={ + bytes.fromhex('000003e0baf17eee5a25aa0ccf36eb331a05818c87bc1c316f54485aa974c485'), + bytes.fromhex('00000717cb78166401aaf2db1a2cae645781bb255efa4ee6b1cf2daa5f390197'), + }, + incoming_edges={ + b'dummy:0', + bytes.fromhex('00000060e9e2358566ad277e7750a016d09043ab53cc4ce7897e29631f5ad7ea'), + }, + ), + SorterNode( + id=bytes.fromhex('00000717cb78166401aaf2db1a2cae645781bb255efa4ee6b1cf2daa5f390197'), + outgoing_edges=set(), + incoming_edges={ + bytes.fromhex('00000e468cc227afe3999df597c49fa37ba527c2a6e2cdf1b9cfe3df67835cab'), + }, + ), + SorterNode( + id=b'dummy:0', + outgoing_edges={ + bytes.fromhex('00000e468cc227afe3999df597c49fa37ba527c2a6e2cdf1b9cfe3df67835cab'), + }, + incoming_edges={ + bytes.fromhex('00000060e9e2358566ad277e7750a016d09043ab53cc4ce7897e29631f5ad7ea'), + }, + ), + SorterNode( + id=b'dummy:1', + outgoing_edges={ + bytes.fromhex('00000060e9e2358566ad277e7750a016d09043ab53cc4ce7897e29631f5ad7ea'), + }, + incoming_edges={ + bytes.fromhex('00000fb45c8eeecbe2bc5ab69f8a1f88081a7739c813b7accefbf4a13ac5e37a'), + }, + ), + SorterNode( + id=b'dummy:2', + outgoing_edges={ + bytes.fromhex('00000fb45c8eeecbe2bc5ab69f8a1f88081a7739c813b7accefbf4a13ac5e37a'), + }, + incoming_edges={ + 
bytes.fromhex('000049ba9ba45cf8dccaed7d05b8a383ca392b9329866531da9c45960e699f26'), + bytes.fromhex('0000142cf4351face7ff5803117f6d4c0375b0b724c576f7ffcbea7058fa9470'), + bytes.fromhex('00004151b4a5eed517d225da4be498ec29c3f61ecf1b72766a16ab952610af1b'), + }, + ), + ] + + sorter = NCBlockSorter(nc_hashes) + for node in db: + sorter.db[node.id] = node + + # XXX: not strictly necessary, whatever order we set must never change + expected_order = list(bytes.fromhex(i) for i in [ + '00000e468cc227afe3999df597c49fa37ba527c2a6e2cdf1b9cfe3df67835cab', + '00000060e9e2358566ad277e7750a016d09043ab53cc4ce7897e29631f5ad7ea', + '00000fb45c8eeecbe2bc5ab69f8a1f88081a7739c813b7accefbf4a13ac5e37a', + '00004151b4a5eed517d225da4be498ec29c3f61ecf1b72766a16ab952610af1b', + '0000142cf4351face7ff5803117f6d4c0375b0b724c576f7ffcbea7058fa9470', + '000049ba9ba45cf8dccaed7d05b8a383ca392b9329866531da9c45960e699f26', + ]) + order = sorter.generate_random_topological_order(seed) + assert order == expected_order + + # XXX: this is necessary to preserve the consensus of the mainnet + tx1 = bytes.fromhex('0000142cf4351face7ff5803117f6d4c0375b0b724c576f7ffcbea7058fa9470') + tx2 = bytes.fromhex('000049ba9ba45cf8dccaed7d05b8a383ca392b9329866531da9c45960e699f26') + assert order.index(tx2) > order.index(tx1) diff --git a/hathor_tests/nanocontracts/test_storage.py b/hathor_tests/nanocontracts/test_storage.py index 390c9fc57..5f788687c 100644 --- a/hathor_tests/nanocontracts/test_storage.py +++ b/hathor_tests/nanocontracts/test_storage.py @@ -89,7 +89,7 @@ def test_bool_false(self) -> None: def test_tuple(self) -> None: value: NCType[tuple[str, int, set[int], bool]] - value = make_nc_type(tuple[str, int, set[int], bool]) # type: ignore[arg-type] + value = make_nc_type(tuple[str, int, set[int], bool]) self._run_test(('str', 1, {3}, True), value) def test_changes_tracker_delete(self) -> None: diff --git a/hathor_tests/nanocontracts/test_crypto_functions.py b/hathor_tests/nanocontracts/test_utils_functions.py 
similarity index 85% rename from hathor_tests/nanocontracts/test_crypto_functions.py rename to hathor_tests/nanocontracts/test_utils_functions.py index b961cb137..0b3f41465 100644 --- a/hathor_tests/nanocontracts/test_crypto_functions.py +++ b/hathor_tests/nanocontracts/test_utils_functions.py @@ -19,6 +19,7 @@ from cryptography.hazmat.primitives import hashes from cryptography.hazmat.primitives.asymmetric import ec +from hathor import ContractId from hathor.crypto.util import get_public_key_bytes_compressed from hathor.nanocontracts import Blueprint, Context, NCFail, public, utils as nc_utils, view from hathor_tests.nanocontracts.blueprints.unittest import BlueprintTestCase @@ -37,8 +38,13 @@ def test_sha3(self, data: bytes) -> bytes: def test_verify_ecdsa(self, public_key: bytes, data: bytes, signature: bytes) -> bool: return nc_utils.verify_ecdsa(public_key, data, signature) + @view + def test_json_dumps(self) -> str: + obj = dict(a=[1, 2, 3], b=123, c='abc', d=ContractId(b'\x01' * 32)) + return nc_utils.json_dumps(obj) + -class TestCryptoFunctions(BlueprintTestCase): +class TestUtilsFunctions(BlueprintTestCase): def setUp(self) -> None: super().setUp() @@ -75,3 +81,10 @@ def test_verify_ecdsa_success(self) -> None: signature = private_key.sign(data, ec.ECDSA(hashes.SHA256())) assert self.runner.call_view_method(self.contract_id, 'test_verify_ecdsa', public_key, data, signature) + + def test_json_dumps(self) -> None: + result = self.runner.call_view_method(self.contract_id, 'test_json_dumps') + + assert result == ( + '{"a":[1,2,3],"b":123,"c":"abc","d":"0101010101010101010101010101010101010101010101010101010101010101"}' + ) diff --git a/hathor_tests/nanocontracts/test_voided_contract_serialization.py b/hathor_tests/nanocontracts/test_voided_contract_serialization.py index 692aa19f1..7ce6f214e 100644 --- a/hathor_tests/nanocontracts/test_voided_contract_serialization.py +++ b/hathor_tests/nanocontracts/test_voided_contract_serialization.py @@ -80,4 +80,4 @@ def 
test_to_json_extended_for_voided_contract_call(self) -> None: stored_call = self.manager.tx_storage.get_transaction(call_tx.hash) data = stored_call.to_json_extended() assert data['nc_id'] == nc_fail.hash_hex - assert data['nc_blueprint_id'] == self.fail_blueprint_id.hex() + assert data['nc_blueprint_id'] == '' diff --git a/hathor_tests/others/test_cli_builder.py b/hathor_tests/others/test_cli_builder.py index b6e236d75..ba13309d7 100644 --- a/hathor_tests/others/test_cli_builder.py +++ b/hathor_tests/others/test_cli_builder.py @@ -8,7 +8,7 @@ from hathor.indexes import RocksDBIndexesManager from hathor.manager import HathorManager from hathor.p2p.sync_version import SyncVersion -from hathor.transaction.storage import TransactionCacheStorage, TransactionRocksDBStorage +from hathor.transaction.storage import TransactionRocksDBStorage from hathor.wallet import HDWallet, Wallet from hathor_cli.builder import CliBuilder from hathor_cli.run_node_args import RunNodeArgs @@ -50,8 +50,8 @@ def test_empty(self): def test_all_default(self): data_dir = self.mkdtemp() manager = self._build(['--data', data_dir]) - self.assertIsInstance(manager.tx_storage, TransactionCacheStorage) - self.assertIsInstance(manager.tx_storage.store, TransactionRocksDBStorage) + self.assertIsInstance(manager.tx_storage, TransactionRocksDBStorage) + self.assertIsNotNone(manager.tx_storage.cache_data) self.assertIsInstance(manager.tx_storage.indexes, RocksDBIndexesManager) self.assertIsNone(manager.wallet) self.assertEqual('unittests', manager.network) @@ -70,8 +70,8 @@ def test_disable_cache_storage(self): def test_rocksdb_storage(self): data_dir = self.mkdtemp() manager = self._build(['--data', data_dir]) - self.assertIsInstance(manager.tx_storage, TransactionCacheStorage) - self.assertIsInstance(manager.tx_storage.store, TransactionRocksDBStorage) + self.assertIsInstance(manager.tx_storage, TransactionRocksDBStorage) + self.assertIsNotNone(manager.tx_storage.cache_data) 
self.assertIsInstance(manager.tx_storage.indexes, RocksDBIndexesManager) def test_sync_default(self): diff --git a/hathor_tests/others/test_init_manager.py b/hathor_tests/others/test_init_manager.py index 56cdbe0db..702232298 100644 --- a/hathor_tests/others/test_init_manager.py +++ b/hathor_tests/others/test_init_manager.py @@ -1,7 +1,9 @@ from typing import Iterator from hathor.conf.settings import HathorSettings +from hathor.indexes import RocksDBIndexesManager from hathor.pubsub import PubSubManager +from hathor.reactor import ReactorProtocol from hathor.simulator.utils import add_new_block, add_new_blocks from hathor.storage import RocksDBStorage from hathor.transaction import BaseTransaction @@ -14,16 +16,19 @@ class ModifiedTransactionRocksDBStorage(TransactionRocksDBStorage): - def __init__(self, path: str, settings: HathorSettings): + def __init__(self, reactor: ReactorProtocol, path: str, settings: HathorSettings): from hathor.nanocontracts.storage import NCRocksDBStorageFactory rocksdb_storage = RocksDBStorage(path=path) nc_storage_factory = NCRocksDBStorageFactory(rocksdb_storage) + indexes = RocksDBIndexesManager(rocksdb_storage=rocksdb_storage, settings=settings) super().__init__( + reactor=reactor, rocksdb_storage=rocksdb_storage, settings=settings, vertex_parser=VertexParser(settings=settings), nc_storage_factory=nc_storage_factory, vertex_children_service=RocksDBVertexChildrenService(rocksdb_storage), + indexes=indexes, ) self._first_tx: BaseTransaction | None = None @@ -44,7 +49,9 @@ class SimpleManagerInitializationTestCase(unittest.TestCase): def setUp(self): super().setUp() self.path = self.mkdtemp() - self.tx_storage = ModifiedTransactionRocksDBStorage(path=self.path, settings=self._settings) + self.tx_storage = ModifiedTransactionRocksDBStorage( + reactor=self.reactor, path=self.path, settings=self._settings + ) self.pubsub = PubSubManager(self.clock) def test_invalid_arguments(self): @@ -104,7 +111,9 @@ class 
ManagerInitializationTestCase(unittest.TestCase): def setUp(self): super().setUp() self.path = self.mkdtemp() - self.tx_storage = ModifiedTransactionRocksDBStorage(path=self.path, settings=self._settings) + self.tx_storage = ModifiedTransactionRocksDBStorage( + reactor=self.reactor, path=self.path, settings=self._settings + ) self.network = 'testnet' self.manager = self.create_peer(self.network, tx_storage=self.tx_storage) @@ -145,7 +154,7 @@ def test_init_good_order(self): # a new manager must be successfully initialized self.manager.stop() self.tx_storage._rocksdb_storage.close() - new_storage = ModifiedTransactionRocksDBStorage(path=self.path, settings=self._settings) + new_storage = ModifiedTransactionRocksDBStorage(reactor=self.reactor, path=self.path, settings=self._settings) artifacts = self.get_builder().set_tx_storage(new_storage).build() artifacts.manager.start() self.clock.run() @@ -170,7 +179,7 @@ def test_init_unfavorable_order(self): # a new manager must be successfully initialized self.manager.stop() self.tx_storage._rocksdb_storage.close() - new_storage = ModifiedTransactionRocksDBStorage(path=self.path, settings=self._settings) + new_storage = ModifiedTransactionRocksDBStorage(reactor=self.reactor, path=self.path, settings=self._settings) artifacts = self.get_builder().set_tx_storage(new_storage).build() artifacts.manager.start() self.clock.run() @@ -206,7 +215,7 @@ def test_init_not_voided_tips(self): # create a new manager (which will initialize in the self.create_peer call) self.manager.stop() self.tx_storage._rocksdb_storage.close() - new_storage = ModifiedTransactionRocksDBStorage(path=self.path, settings=self._settings) + new_storage = ModifiedTransactionRocksDBStorage(reactor=self.reactor, path=self.path, settings=self._settings) artifacts = self.get_builder().set_tx_storage(new_storage).build() manager = artifacts.manager manager.start() diff --git a/hathor_tests/others/test_metrics.py b/hathor_tests/others/test_metrics.py index 
ad332284b..8c0e0e1b2 100644 --- a/hathor_tests/others/test_metrics.py +++ b/hathor_tests/others/test_metrics.py @@ -1,6 +1,7 @@ import tempfile from unittest.mock import Mock +from hathor.indexes import RocksDBIndexesManager from hathor.manager import HathorManager from hathor.p2p.manager import PeerConnectionsMetrics from hathor.p2p.peer import PrivatePeer @@ -8,7 +9,8 @@ from hathor.p2p.protocol import HathorProtocol from hathor.pubsub import HathorEvents from hathor.simulator.utils import add_new_blocks -from hathor.transaction.storage import TransactionCacheStorage, TransactionRocksDBStorage +from hathor.transaction.storage import TransactionRocksDBStorage +from hathor.transaction.storage.rocksdb_storage import CacheConfig from hathor.transaction.vertex_children import RocksDBVertexChildrenService from hathor.transaction.vertex_parser import VertexParser from hathor.wallet import Wallet @@ -183,9 +185,9 @@ def _init_manager(path: tempfile.TemporaryDirectory | None = None) -> HathorMana # XXX: I had to close the DB and reinitialize the classes to force a flush of RocksDB memtables to disk # But I think we could do this in a better way if we had a python-binding for this Flush method in # https://github.com/facebook/rocksdb/blob/v7.5.3/include/rocksdb/db.h#L1396 - manager.tx_storage.store._db.close() + manager.tx_storage._db.close() - manager = _init_manager(manager.tx_storage.store._rocksdb_storage.temp_dir) + manager = _init_manager(manager.tx_storage._rocksdb_storage.temp_dir) manager.metrics._collect_data() # We don't know exactly the sizes of each column family, @@ -248,7 +250,7 @@ def build_hathor_protocol(): def test_cache_data_collection(self): """Test if cache-related data is correctly being collected from the - TransactionCacheStorage + TransactionRocksDBStorage """ from hathor.nanocontracts.storage import NCRocksDBStorageFactory @@ -256,26 +258,22 @@ def test_cache_data_collection(self): rocksdb_storage = self.create_rocksdb_storage() 
nc_storage_factory = NCRocksDBStorageFactory(rocksdb_storage) vertex_children_service = RocksDBVertexChildrenService(rocksdb_storage) - base_storage = TransactionRocksDBStorage( + indexes = RocksDBIndexesManager(rocksdb_storage=rocksdb_storage, settings=self._settings) + tx_storage = TransactionRocksDBStorage( + reactor=self.reactor, rocksdb_storage=rocksdb_storage, settings=self._settings, vertex_parser=VertexParser(settings=self._settings), nc_storage_factory=nc_storage_factory, vertex_children_service=vertex_children_service, - ) - tx_storage = TransactionCacheStorage( - base_storage, - self.clock, - indexes=None, - settings=self._settings, - nc_storage_factory=nc_storage_factory, - vertex_children_service=vertex_children_service, + indexes=indexes, + cache_config=CacheConfig(), ) manager = self.create_peer('testnet', tx_storage=tx_storage) - - tx_storage.stats["hit"] = 10 - tx_storage.stats["miss"] = 20 + data = tx_storage.cache_data + data.hit = 10 + data.miss = 20 # Execution manager.metrics._collect_data() diff --git a/hathor_tests/p2p/test_double_spending.py b/hathor_tests/p2p/test_double_spending.py index 1b5e9bb99..a61d01a2d 100644 --- a/hathor_tests/p2p/test_double_spending.py +++ b/hathor_tests/p2p/test_double_spending.py @@ -86,7 +86,6 @@ def test_simple_double_spending(self) -> None: spent_meta = spent_tx.get_metadata() self.assertEqual([tx1.hash, tx2.hash], spent_meta.spent_outputs[txin.index]) - assert self.manager1.tx_storage.indexes.mempool_tips is not None self.assertNotIn(tx1.hash, self.manager1.tx_storage.indexes.mempool_tips.get()) self.assertNotIn(tx2.hash, self.manager1.tx_storage.indexes.mempool_tips.get()) @@ -111,7 +110,6 @@ def test_simple_double_spending(self) -> None: spent_meta = spent_tx.get_metadata() self.assertEqual([tx1.hash, tx2.hash, tx3.hash], spent_meta.spent_outputs[txin.index]) - assert self.manager1.tx_storage.indexes.mempool_tips is not None self.assertNotIn(tx1.hash, self.manager1.tx_storage.indexes.mempool_tips.get()) 
self.assertNotIn(tx2.hash, self.manager1.tx_storage.indexes.mempool_tips.get()) self.assertIn(tx3.hash, self.manager1.tx_storage.indexes.mempool_tips.get()) diff --git a/hathor_tests/p2p/test_split_brain.py b/hathor_tests/p2p/test_split_brain.py index a7550d456..9919ce656 100644 --- a/hathor_tests/p2p/test_split_brain.py +++ b/hathor_tests/p2p/test_split_brain.py @@ -134,7 +134,7 @@ def test_split_brain_only_blocks_different_height(self) -> None: # Add one more block to manager1, so it's the winner chain add_new_block(manager1, advance_clock=1) - block_tip1 = not_none(manager1.tx_storage.indexes).height.get_tip() + block_tip1 = manager1.tx_storage.indexes.height.get_tip() self.assertConsensusValid(manager1) self.assertConsensusValid(manager2) @@ -157,8 +157,8 @@ def test_split_brain_only_blocks_different_height(self) -> None: self.assertConsensusValid(manager2) self.assertConsensusEqual(manager1, manager2) - self.assertEqual(block_tip1, not_none(manager1.tx_storage.indexes).height.get_tip()) - self.assertEqual(block_tip1, not_none(manager2.tx_storage.indexes).height.get_tip()) + self.assertEqual(block_tip1, manager1.tx_storage.indexes.height.get_tip()) + self.assertEqual(block_tip1, manager2.tx_storage.indexes.height.get_tip()) def test_split_brain_only_blocks_same_height(self) -> None: manager1 = self.create_peer(self.network, unlock_wallet=True) diff --git a/hathor_tests/p2p/test_sync_v2.py b/hathor_tests/p2p/test_sync_v2.py index 4d0fb7ee0..4cf23dd86 100644 --- a/hathor_tests/p2p/test_sync_v2.py +++ b/hathor_tests/p2p/test_sync_v2.py @@ -3,7 +3,6 @@ from typing import cast from unittest.mock import patch -import pytest from twisted.internet.defer import Deferred, succeed from twisted.python.failure import Failure @@ -24,7 +23,6 @@ from hathor.transaction import Block from hathor.transaction.storage import TransactionRocksDBStorage from hathor.transaction.storage.transaction_storage import TransactionStorage -from hathor.transaction.storage.traversal import 
DFSWalk from hathor.types import VertexId from hathor.util import not_none from hathor_tests.dag_builder.builder import TestDAGBuilder @@ -152,7 +150,6 @@ def test_restart_fullnode_quick(self) -> None: def test_restart_fullnode_quick_with_cache(self) -> None: self._run_restart_test(use_tx_storage_cache=True) - @pytest.mark.skip(reason='broken') def test_exceeds_streaming_and_mempool_limits(self) -> None: manager1 = self.create_peer() manager1.allow_mining_without_peers() @@ -182,13 +179,7 @@ def test_exceeds_streaming_and_mempool_limits(self) -> None: blk = manager1.tx_storage.get_best_block() tx_parents = [manager1.tx_storage.get_transaction(x) for x in blk.parents[1:]] self.assertEqual(len(tx_parents), 2) - dfs = DFSWalk(manager1.tx_storage, is_dag_verifications=True, is_left_to_right=False) - cnt = 0 - for tx in dfs.run(tx_parents): - if tx.get_metadata().first_block == blk.hash: - cnt += 1 - else: - dfs.skip_neighbors(tx) + cnt = len(list(blk.iter_transactions_in_this_block())) self.assertGreater(cnt, 400) # Generate 500 txs in mempool. 
@@ -253,8 +244,6 @@ def test_receiving_tips_limit(self) -> None: ''') artifacts.propagate_with(manager1) - assert manager1.tx_storage.indexes is not None - assert manager1.tx_storage.indexes.mempool_tips is not None mempool_tips_count = len(manager1.tx_storage.indexes.mempool_tips.get()) # we should expect at the very least 30 tips self.assertGreater(mempool_tips_count, 30) @@ -390,7 +379,7 @@ def fake_get_peer_block_hashes(heights: list[int]) -> Deferred[list[_HeightInfo] response = [] for h in heights: if h < reorg_height: - index_manager = not_none(conn12.manager2.tx_storage.indexes) + index_manager = conn12.manager2.tx_storage.indexes vertex_id = not_none(index_manager.height.get(h)) else: vertex_id = rng.randbytes(32) @@ -425,7 +414,7 @@ def fake_get_peer_block_hashes(heights: list[int]) -> Deferred[list[_HeightInfo] response = [] for h in heights: if h < reorg_height: - index_manager = not_none(conn12.manager2.tx_storage.indexes) + index_manager = conn12.manager2.tx_storage.indexes vertex_id = not_none(index_manager.height.get(h)) else: vertex_id = rng.randbytes(32) diff --git a/hathor_tests/resources/nanocontracts/my_blueprint.py b/hathor_tests/resources/nanocontracts/my_blueprint.py index e98e56a8e..a8d614711 100644 --- a/hathor_tests/resources/nanocontracts/my_blueprint.py +++ b/hathor_tests/resources/nanocontracts/my_blueprint.py @@ -4,6 +4,7 @@ Address, Amount, Blueprint, + CallerId, Context, SignedData, Timestamp, @@ -30,6 +31,7 @@ class MyBlueprint(Blueprint): a_tuple: tuple[str, int, bool] a_dict_dict_tuple: dict[str, tuple[str, int]] a_optional_int: Optional[int] + a_caller_id: CallerId @public def initialize(self, ctx: Context, arg1: int) -> None: diff --git a/hathor_tests/resources/nanocontracts/test_blueprint.py b/hathor_tests/resources/nanocontracts/test_blueprint.py index 7ed434c9c..71d55945c 100644 --- a/hathor_tests/resources/nanocontracts/test_blueprint.py +++ b/hathor_tests/resources/nanocontracts/test_blueprint.py @@ -71,6 +71,7 @@ def 
test_success(self) -> Generator[Deferred[Any], Any, None]: 'a_tuple': 'tuple[str, int, bool]', 'a_dict_dict_tuple': 'dict[str, tuple[str, int]]', 'a_optional_int': 'int?', + 'a_caller_id': 'CallerId', }) self.assertEqual(data['public_methods'], { 'initialize': { diff --git a/hathor_tests/resources/nanocontracts/test_history.py b/hathor_tests/resources/nanocontracts/test_history.py index b4e8f022d..072e1fbd2 100644 --- a/hathor_tests/resources/nanocontracts/test_history.py +++ b/hathor_tests/resources/nanocontracts/test_history.py @@ -185,6 +185,20 @@ def test_success(self): ids = [tx['hash'] for tx in data2['history']] self.assertEqual(ids, [tx1.hash.hex(), nc1.hash.hex()]) + # Check ascending order + response_asc = yield self.web.get( + 'history', + { + b'id': nc1.hash.hex().encode('ascii'), + b'order': b'asc', + } + ) + data_asc = response_asc.json_value() + self.assertEqual(data_asc['has_more'], False) + self.assertEqual(len(data_asc['history']), 2) + ids_asc = [tx['hash'] for tx in data_asc['history']] + self.assertEqual(ids_asc, [nc1.hash.hex(), tx1.hash.hex()]) + # Check paging works minimally with after response2a = yield self.web.get( 'history', diff --git a/hathor_tests/resources/nanocontracts/test_history2.py b/hathor_tests/resources/nanocontracts/test_history2.py index d5687323a..71f084334 100644 --- a/hathor_tests/resources/nanocontracts/test_history2.py +++ b/hathor_tests/resources/nanocontracts/test_history2.py @@ -13,7 +13,7 @@ settings = HathorSettings() -class TestBlueprint(Blueprint): +class LogEmitBlueprint(Blueprint): value: int @public @@ -32,7 +32,7 @@ def setUp(self): self.blueprint_id = b'x' * 32 self.catalog = NCBlueprintCatalog({ - self.blueprint_id: TestBlueprint + self.blueprint_id: LogEmitBlueprint }) self.manager = self.create_peer( @@ -107,37 +107,23 @@ def test_include_nc_logs_and_events(self): self.assertEqual(bytes.fromhex(event['data']), b'combined test') # Test NanoContractHistoryResource API - # Test history for nc1 - response = 
yield self.web_history.get('history', { + # By default, transactions are created with increasing timestamps, so nc2 is newer than nc1. + # Test history for nc1 with default order (desc) + response_desc = yield self.web_history.get('history', { b'id': nc1.hash.hex().encode('ascii'), b'include_nc_logs': b'true', b'include_nc_events': b'true', }) - data = response.json_value() - self.assertTrue(data['success']) - self.assertGreater(len(data['history']), 0) + data_desc = response_desc.json_value() + self.assertTrue(data_desc['success']) + self.assertEqual(len(data_desc['history']), 2) - # Find nc1 in history (it should be the initialize transaction) - nc1_in_history = None - for tx_data in data['history']: - if tx_data['hash'] == nc1.hash_hex: - nc1_in_history = tx_data - break + # Check order (desc), newest first: nc2, then nc1 + self.assertEqual(data_desc['history'][0]['hash'], nc2.hash_hex) + self.assertEqual(data_desc['history'][1]['hash'], nc1.hash_hex) - self.assertIsNotNone(nc1_in_history) - self.assertEqual(nc1_in_history['nc_args_decoded'], [42]) - self.assertIn('nc_logs', nc1_in_history) - self.assertIn('nc_events', nc1_in_history) - self.assertEqual(nc1_in_history['nc_events'], []) - - # Find nc2 in history (log_and_emit transaction) - nc2_in_history = None - for tx_data in data['history']: - if tx_data['hash'] == nc2.hash_hex: - nc2_in_history = tx_data - break - - self.assertIsNotNone(nc2_in_history) + # Check content of nc2 in history + nc2_in_history = data_desc['history'][0] self.assertEqual(nc2_in_history['nc_args_decoded'], ["combined test"]) self.assertIn('nc_logs', nc2_in_history) self.assertIsInstance(nc2_in_history['nc_logs'], dict) @@ -147,3 +133,23 @@ def test_include_nc_logs_and_events(self): self.assertEqual(len(nc2_in_history['nc_events']), 1) event = nc2_in_history['nc_events'][0] self.assertEqual(bytes.fromhex(event['data']), b'combined test') + + # Check content of nc1 in history + nc1_in_history = data_desc['history'][1] + 
self.assertEqual(nc1_in_history['nc_args_decoded'], [42]) + self.assertIn('nc_logs', nc1_in_history) + self.assertIn('nc_events', nc1_in_history) + self.assertEqual(nc1_in_history['nc_events'], []) + + # Test history for nc1 with asc order + response_asc = yield self.web_history.get('history', { + b'id': nc1.hash.hex().encode('ascii'), + b'order': b'asc', + }) + data_asc = response_asc.json_value() + self.assertTrue(data_asc['success']) + self.assertEqual(len(data_asc['history']), 2) + + # Check order (asc), oldest first: nc1, then nc2 + self.assertEqual(data_asc['history'][0]['hash'], nc1.hash_hex) + self.assertEqual(data_asc['history'][1]['hash'], nc2.hash_hex) diff --git a/hathor_tests/resources/transaction/test_create_tx.py b/hathor_tests/resources/transaction/test_create_tx.py index 8f190a222..fede3089f 100644 --- a/hathor_tests/resources/transaction/test_create_tx.py +++ b/hathor_tests/resources/transaction/test_create_tx.py @@ -314,7 +314,7 @@ def test_invalid_value(self): ] })).json_value() self.assertEqual(resp, { - 'error': 'HTR balance is different than expected. (amount=1, expected=0)' + 'error': 'There\'s an invalid surplus of HTR. (amount=1, expected=0)' }) @inlineCallbacks @@ -334,7 +334,7 @@ def test_invalid_value2(self): ] })).json_value() self.assertEqual(resp, { - 'error': 'HTR balance is different than expected. (amount=-1, expected=0)' + 'error': 'There\'s an invalid deficit of HTR. 
(amount=-1, expected=0)' }) @inlineCallbacks diff --git a/hathor_tests/resources/transaction/test_pushtx.py b/hathor_tests/resources/transaction/test_pushtx.py index 5cbf9a062..acf665a95 100644 --- a/hathor_tests/resources/transaction/test_pushtx.py +++ b/hathor_tests/resources/transaction/test_pushtx.py @@ -1,4 +1,4 @@ -from typing import Generator, Optional +from typing import Any, Generator, Optional from twisted.internet.defer import inlineCallbacks @@ -10,7 +10,7 @@ from hathor.wallet.base_wallet import WalletInputInfo, WalletOutputInfo from hathor.wallet.resources import SendTokensResource from hathor_tests.resources.base_resource import StubSite, _BaseResourceTest -from hathor_tests.utils import add_blocks_unlock_reward, add_tx_with_data_script, create_tokens +from hathor_tests.utils import add_blocks_unlock_reward, add_tx_with_data_script, create_fee_tokens, create_tokens class BasePushTxTest(_BaseResourceTest._ResourceTest): @@ -23,8 +23,11 @@ def setUp(self): self.web = StubSite(PushTxResource(self.manager)) self.web_tokens = StubSite(SendTokensResource(self.manager, self._settings)) - def get_tx(self, inputs: Optional[list[WalletInputInfo]] = None, - outputs: Optional[list[WalletOutputInfo]] = None) -> Transaction: + def get_tx( + self, + inputs: Optional[list[WalletInputInfo]] = None, + outputs: Optional[list[WalletOutputInfo]] = None + ) -> Transaction: if not outputs: address = self.get_address(0) assert address is not None @@ -69,6 +72,21 @@ def push_tx(self, data=None): args[nk] = nv return self.web.get('push_tx', args) + @inlineCallbacks + def test_push_tx_fee_header(self): + self.manager.wallet.unlock(b'MYPASS') + add_blocks_unlock_reward(self.manager) + address = self.get_address(0) + assert address is not None + tx = create_fee_tokens(self.manager, address_b58=address, propagate=False) + + self.assertTrue(tx.has_fees()) + tx_hex = tx.get_struct().hex() + + response = yield self.push_tx({'hex_tx': tx_hex}) + data = response.json_value() + 
self.assertTrue(data['success']) + @inlineCallbacks def test_push_tx(self) -> Generator: self.manager.wallet.unlock(b'MYPASS') @@ -77,7 +95,7 @@ def test_push_tx(self) -> Generator: tx = self.get_tx() tx_hex = tx.get_struct().hex() - response = yield self.push_tx({'hex_tx': tx_hex}) + response: Any = yield self.push_tx({'hex_tx': tx_hex}) data = response.json_value() self.assertTrue(data['success']) @@ -90,7 +108,7 @@ def test_push_tx(self) -> Generator: self.manager.cpu_mining_service.resolve(tx) tx_hex = tx.get_struct().hex() - response_success = yield self.push_tx({'hex_tx': tx_hex}) + response_success: Any = yield self.push_tx({'hex_tx': tx_hex}) data_success = response_success.json_value() self.assertFalse(data_success['success']) @@ -118,7 +136,7 @@ def test_push_tx(self) -> Generator: # Invalid tx (don't have inputs) genesis_tx = next(x for x in self.manager.tx_storage.get_all_genesis() if x.is_transaction) genesis_hex = genesis_tx.get_struct().hex() - response_genesis = yield self.push_tx({'tx_hex': genesis_hex}) + response_genesis: Any = yield self.push_tx({'tx_hex': genesis_hex}) data_genesis = response_genesis.json_value() self.assertFalse(data_genesis['success']) @@ -143,14 +161,14 @@ def test_push_nft(self) -> Generator: address = script_type_out.address tx3 = create_tokens(self.manager, address, mint_amount=100, propagate=False, nft_data='test') tx3_hex = tx3.get_struct().hex() - response = yield self.push_tx({'hex_tx': tx3_hex}) + response: Any = yield self.push_tx({'hex_tx': tx3_hex}) data = response.json_value() self.assertTrue(data['success']) @inlineCallbacks def test_invalid_params(self) -> Generator: # Missing hex - response = yield self.push_tx() + response: Any = yield self.push_tx() data = response.json_value() self.assertFalse(data['success']) @@ -165,7 +183,7 @@ def test_invalid_params(self) -> Generator: self.assertFalse(data['success']) # Invalid tx hex - response_error2 = yield self.push_tx({'hex_tx': 'a12c'}) + response_error2: Any = 
yield self.push_tx({'hex_tx': 'a12c'}) data_error2 = response_error2.json_value() self.assertFalse(data_error2['success']) @@ -180,7 +198,7 @@ def test_script_too_big(self) -> Generator: tx.outputs[0].script = b'*' * (self._settings.PUSHTX_MAX_OUTPUT_SCRIPT_SIZE + 1) self.manager.cpu_mining_service.resolve(tx) tx_hex = tx.get_struct().hex() - response = yield self.push_tx({'hex_tx': tx_hex}) + response: Any = yield self.push_tx({'hex_tx': tx_hex}) data = response.json_value() self.assertFalse(data['success']) self.assertEqual('Transaction is non standard.', data['message']) @@ -196,7 +214,7 @@ def test_non_standard_script(self) -> Generator: tx.outputs[0].script = b'*' * 5 self.manager.cpu_mining_service.resolve(tx) tx_hex = tx.get_struct().hex() - response = yield self.push_tx({'hex_tx': tx_hex}) + response: Any = yield self.push_tx({'hex_tx': tx_hex}) data = response.json_value() self.assertFalse(data['success']) expected = 'Transaction is non standard.' @@ -211,7 +229,7 @@ def test_spending_voided(self) -> Generator: # Push a first tx tx = self.get_tx() tx_hex = tx.get_struct().hex() - response = yield self.push_tx({'hex_tx': tx_hex}) + response: Any = yield self.push_tx({'hex_tx': tx_hex}) data = response.json_value() self.assertTrue(data['success']) @@ -275,7 +293,7 @@ def test_push_standard_script_data(self) -> Generator: tx1 = add_tx_with_data_script(self.manager, ['test'], propagate=False) tx1_hex = tx1.get_struct().hex() - response = yield self.push_tx({'hex_tx': tx1_hex}) + response: Any = yield self.push_tx({'hex_tx': tx1_hex}) data = response.json_value() self.assertTrue(data['success']) diff --git a/hathor_tests/resources/wallet/test_balance.py b/hathor_tests/resources/wallet/test_balance.py index 69740fedf..40bd9725c 100644 --- a/hathor_tests/resources/wallet/test_balance.py +++ b/hathor_tests/resources/wallet/test_balance.py @@ -30,6 +30,7 @@ def test_get(self): cpu_mining_service=CpuMiningService() ) yield self.web_mining.post("mining", 
{'block_bytes': base64.b64encode(block_bytes).decode('utf-8')}) + self.clock.advance(1) # Get new balance after block response2 = yield self.web.get("wallet/balance") diff --git a/hathor_tests/resources/wallet/test_history.py b/hathor_tests/resources/wallet/test_history.py index d76dee4b3..07673945c 100644 --- a/hathor_tests/resources/wallet/test_history.py +++ b/hathor_tests/resources/wallet/test_history.py @@ -25,6 +25,7 @@ def test_get(self): cpu_mining_service=CpuMiningService() ) yield self.web_mining.post("mining", {'block_bytes': base64.b64encode(block_bytes).decode('utf-8')}) + self.clock.advance(1) # Getting wallet history response = yield self.web.get("wallet/history", {b'page': 1, b'count': 10}) diff --git a/hathor_tests/resources/wallet/test_nano_contract.py b/hathor_tests/resources/wallet/test_nano_contract.py index 7380871d5..938fb3087 100644 --- a/hathor_tests/resources/wallet/test_nano_contract.py +++ b/hathor_tests/resources/wallet/test_nano_contract.py @@ -1,3 +1,4 @@ +import pytest from twisted.internet.defer import inlineCallbacks from hathor.simulator.utils import add_new_blocks @@ -14,6 +15,7 @@ from hathor_tests.utils import add_blocks_unlock_reward +@pytest.mark.skip(reason='old feature, this will be removed') class NanoContractsTest(_BaseResourceTest._ResourceTest): def setUp(self): super().setUp() diff --git a/hathor_tests/simulation/test_simulator.py b/hathor_tests/simulation/test_simulator.py index eb74091e3..bbbe469de 100644 --- a/hathor_tests/simulation/test_simulator.py +++ b/hathor_tests/simulation/test_simulator.py @@ -115,7 +115,7 @@ def test_new_syncing_peer(self) -> None: for hashpower in [10e6, 8e6, 5e6]: manager = self.create_peer() for node in nodes: - conn = FakeConnection(manager, node, latency=0.085) + conn = FakeConnection(manager, node, latency=0.085, autoreconnect=True) self.simulator.add_connection(conn) nodes.append(manager) diff --git a/hathor_tests/tx/test_block.py b/hathor_tests/tx/test_block.py index 
a4e99d55c..2462466ee 100644 --- a/hathor_tests/tx/test_block.py +++ b/hathor_tests/tx/test_block.py @@ -43,7 +43,7 @@ def test_calculate_feature_activation_bit_counts_genesis(): def tx_storage() -> TransactionStorage: artifacts = TestBuilder().build() storage = artifacts.tx_storage - indexes = not_none(artifacts.indexes) + indexes = artifacts.indexes feature_activation_bits = [ 0b0000, # 0: boundary block 0b1010, diff --git a/hathor_tests/tx/test_cache_storage.py b/hathor_tests/tx/test_cache_storage.py index 0269026e9..dba4e3d97 100644 --- a/hathor_tests/tx/test_cache_storage.py +++ b/hathor_tests/tx/test_cache_storage.py @@ -1,7 +1,12 @@ +from unittest.mock import Mock + +from twisted.internet.task import deferLater + from hathor.daa import TestMode +from hathor.reactor import get_global_reactor from hathor.simulator.utils import add_new_blocks from hathor.transaction import Transaction, TransactionMetadata -from hathor.transaction.storage import TransactionCacheStorage +from hathor.transaction.storage import TransactionRocksDBStorage from hathor_tests import unittest from hathor_tests.utils import add_new_transactions @@ -17,7 +22,8 @@ def setUp(self): .set_wallet(self._create_test_wallet(unlocked=True)) self.manager = self.create_peer_from_builder(builder) self.cache_storage = self.manager.tx_storage - self.assertIsInstance(self.cache_storage, TransactionCacheStorage) + self.assertIsInstance(self.cache_storage, TransactionRocksDBStorage) + self.assertIsNotNone(self.cache_storage.cache_data) self.genesis = self.cache_storage.get_all_genesis() self.genesis_blocks = [tx for tx in self.genesis if tx.is_block] @@ -54,11 +60,11 @@ def test_dirty_set(self): self.cache_storage.save_transaction(tx) for tx in txs: - self.assertIn(tx.hash, self.cache_storage.dirty_txs) + self.assertIn(tx.hash, self.cache_storage.cache_data.dirty_txs) # should flush to disk and empty dirty set - self.cache_storage._flush_to_storage(self.cache_storage.dirty_txs.copy()) - self.assertEqual(0, 
len(self.cache_storage.dirty_txs)) + self.cache_storage._flush_to_storage(self.cache_storage.cache_data.dirty_txs.copy()) + self.assertEqual(0, len(self.cache_storage.cache_data.dirty_txs)) def test_capacity(self): # cache should not grow over its capacity @@ -66,7 +72,7 @@ def test_capacity(self): for tx in txs: self.cache_storage.save_transaction(tx) - self.assertEqual(CACHE_SIZE, len(self.cache_storage.cache)) + self.assertEqual(CACHE_SIZE, len(self.cache_storage.cache_data.cache)) def test_read_adds_to_cache(self): # make sure reading also adds to cache, not only writes @@ -75,13 +81,13 @@ def test_read_adds_to_cache(self): self.cache_storage.save_transaction(tx) # by now, tx[0] will already have left the cache - self.assertNotIn(txs[0].hash, self.cache_storage.cache) + self.assertNotIn(txs[0].hash, self.cache_storage.cache_data.cache) # read tx self.cache_storage.get_transaction(txs[0].hash) # now it should be in cache - self.assertIn(txs[0].hash, self.cache_storage.cache) + self.assertIn(txs[0].hash, self.cache_storage.cache_data.cache) def test_read_moves_to_end(self): # when we read a tx from cache, it should be moved to the end of cache so it's evicted later @@ -97,7 +103,7 @@ def test_read_moves_to_end(self): self.cache_storage.save_transaction(txs[-1]) # first tx should be in cache - self.assertIn(txs[0].hash, self.cache_storage.cache) + self.assertIn(txs[0].hash, self.cache_storage.cache_data.cache) def test_cache_eviction(self): # tests we're evicting the oldest tx from cache @@ -107,9 +113,9 @@ def test_cache_eviction(self): # next save should evict first tx self.cache_storage.save_transaction(txs[CACHE_SIZE]) - self.assertNotIn(txs[0].hash, self.cache_storage.cache) - self.assertIn(txs[CACHE_SIZE].hash, self.cache_storage.cache) - self.assertEqual(CACHE_SIZE, len(self.cache_storage.cache)) + self.assertNotIn(txs[0].hash, self.cache_storage.cache_data.cache) + self.assertIn(txs[CACHE_SIZE].hash, self.cache_storage.cache_data.cache) + 
self.assertEqual(CACHE_SIZE, len(self.cache_storage.cache_data.cache)) def test_flush_thread(self): txs = [self._get_new_tx(nonce) for nonce in range(CACHE_SIZE)] @@ -117,30 +123,77 @@ def test_flush_thread(self): self.cache_storage.save_transaction(tx) for tx in txs: - self.assertIn(tx.hash, self.cache_storage.dirty_txs) + self.assertIn(tx.hash, self.cache_storage.cache_data.dirty_txs) # Flush deferred is not None - self.assertIsNotNone(self.cache_storage.flush_deferred) - last_flush_deferred = self.cache_storage.flush_deferred + self.assertIsNotNone(self.cache_storage.cache_data.flush_deferred) + last_flush_deferred = self.cache_storage.cache_data.flush_deferred + + # A call when the deferred already exists, shouldn't override it self.cache_storage._start_flush_thread() - self.assertEqual(last_flush_deferred, self.cache_storage.flush_deferred) + self.assertEqual(last_flush_deferred, self.cache_storage.cache_data.flush_deferred) # We flush the cache and flush_deferred becomes None - self.cache_storage._cb_flush_thread(self.cache_storage.dirty_txs.copy()) - self.assertIsNone(self.cache_storage.flush_deferred) + self.cache_storage._cb_flush_thread(None) + self.assertIsNone(self.cache_storage.cache_data.flush_deferred) # After the interval it becomes not None again self.clock.advance(10) - self.assertIsNotNone(self.cache_storage.flush_deferred) + self.assertIsNotNone(self.cache_storage.cache_data.flush_deferred) # If an err occurs, it will become None again and then not None after the interval self.cache_storage._err_flush_thread('') - self.assertIsNone(self.cache_storage.flush_deferred) + self.assertIsNone(self.cache_storage.cache_data.flush_deferred) self.clock.advance(5) - self.assertIsNotNone(self.cache_storage.flush_deferred) + self.assertIsNotNone(self.cache_storage.cache_data.flush_deferred) # Remove element from cache to test a part of the code - del self.cache_storage.cache[next(iter(self.cache_storage.dirty_txs))] - 
self.cache_storage._flush_to_storage(self.cache_storage.dirty_txs.copy()) + del self.cache_storage.cache_data.cache[next(iter(self.cache_storage.cache_data.dirty_txs))] + self.cache_storage._flush_to_storage(self.cache_storage.cache_data.dirty_txs.copy()) + + async def test_flush_thread_global_reactor(self) -> None: + interval = 1 + reactor = get_global_reactor() + artifacts = self.get_builder() \ + .use_tx_storage_cache(capacity=5) \ + .set_wallet(self._create_test_wallet(unlocked=True)) \ + .set_reactor(reactor) \ + .build() + + self.manager = artifacts.manager + self.cache_storage = self.manager.tx_storage + self.cache_storage.cache_data.interval = interval + + og_start_flush_thread = self.cache_storage._start_flush_thread + og_cb_flush_thread = self.cache_storage._cb_flush_thread + og_err_flush_thread = self.cache_storage._err_flush_thread + + self.cache_storage._start_flush_thread = Mock(wraps=og_start_flush_thread) + self.cache_storage._cb_flush_thread = Mock(wraps=og_cb_flush_thread) + self.cache_storage._err_flush_thread = Mock(wraps=og_err_flush_thread) + + self.manager.start() + + txs = [self._get_new_tx(nonce) for nonce in range(CACHE_SIZE)] + for tx in txs: + self.cache_storage.save_transaction(tx) + + for tx in txs: + assert tx.hash in self.cache_storage.cache_data.dirty_txs + + assert self.cache_storage.cache_data.flush_deferred is None + + assert self.cache_storage._start_flush_thread.call_count == 0 + assert self.cache_storage._cb_flush_thread.call_count == 0 + assert self.cache_storage._err_flush_thread.call_count == 0 + + await deferLater(reactor, interval + 0.1, lambda: None) + + assert self.cache_storage._start_flush_thread.call_count == 1 + assert self.cache_storage._cb_flush_thread.call_count == 1 + assert self.cache_storage._err_flush_thread.call_count == 0 + + assert self.cache_storage.cache_data.flush_deferred is None + self.clean_pending(required_to_quiesce=False) def test_topological_sort_dfs(self): self.manager.daa.TEST_MODE = 
TestMode.TEST_ALL_WEIGHT diff --git a/hathor_tests/tx/test_fee_tokens.py b/hathor_tests/tx/test_fee_tokens.py index 797870987..a58972201 100644 --- a/hathor_tests/tx/test_fee_tokens.py +++ b/hathor_tests/tx/test_fee_tokens.py @@ -14,7 +14,7 @@ import pytest -from hathor.conf.settings import NanoContractsSetting +from hathor.conf.settings import FeatureSetting from hathor.crypto.util import decode_address from hathor.exception import InvalidNewTransaction from hathor.indexes.tokens_index import TokenUtxoInfo @@ -634,7 +634,7 @@ def test_fee_token_activation(self) -> None: 'testnet', unlock_wallet=True, wallet_index=True, - settings=self._settings._replace(ENABLE_NANO_CONTRACTS=NanoContractsSetting.DISABLED), + settings=self._settings._replace(ENABLE_FEE_BASED_TOKENS=FeatureSetting.DISABLED), ) with pytest.raises(InvalidNewTransaction) as e: create_fee_tokens(custom_manager, self.address_b58) diff --git a/hathor_tests/tx/test_indexes.py b/hathor_tests/tx/test_indexes.py index c5cdc1a6b..0173bd447 100644 --- a/hathor_tests/tx/test_indexes.py +++ b/hathor_tests/tx/test_indexes.py @@ -1,5 +1,6 @@ from hathor.crypto.util import decode_address from hathor.graphviz import GraphvizVisualizer +from hathor.indexes import RocksDBIndexesManager from hathor.simulator.utils import add_new_block, add_new_blocks from hathor.storage.rocksdb_storage import RocksDBStorage from hathor.transaction import Transaction @@ -31,11 +32,10 @@ def test_tx_tips_with_conflict(self): tx1.timestamp = int(self.clock.seconds()) self.manager.cpu_mining_service.resolve(tx1) self.assertTrue(self.manager.propagate_tx(tx1)) - if self.manager.tx_storage.indexes.mempool_tips is not None: - self.assertEqual( - {tx.hash for tx in self.manager.tx_storage.indexes.mempool_tips.iter(self.manager.tx_storage)}, - {tx1.hash} - ) + self.assertEqual( + {tx.hash for tx in self.manager.tx_storage.indexes.mempool_tips.iter(self.manager.tx_storage)}, + {tx1.hash} + ) outputs = 
[WalletOutputInfo(address=decode_address(address), value=value, timelock=None)] @@ -46,11 +46,10 @@ def test_tx_tips_with_conflict(self): tx2.timestamp = int(self.clock.seconds()) + 1 self.manager.cpu_mining_service.resolve(tx2) self.assertTrue(self.manager.propagate_tx(tx2)) - if self.manager.tx_storage.indexes.mempool_tips is not None: - self.assertEqual( - {tx.hash for tx in self.manager.tx_storage.indexes.mempool_tips.iter(self.manager.tx_storage)}, - {tx2.hash} - ) + self.assertEqual( + {tx.hash for tx in self.manager.tx_storage.indexes.mempool_tips.iter(self.manager.tx_storage)}, + {tx2.hash} + ) tx3 = Transaction.create_from_struct(tx2.get_struct()) tx3.timestamp = tx2.timestamp + 1 @@ -59,14 +58,13 @@ def test_tx_tips_with_conflict(self): self.assertNotEqual(tx2.hash, tx3.hash) self.assertTrue(self.manager.propagate_tx(tx3)) self.assertIn(tx3.hash, tx2.get_metadata().conflict_with) - if self.manager.tx_storage.indexes.mempool_tips is not None: - self.assertEqual( - {tx.hash for tx in self.manager.tx_storage.indexes.mempool_tips.iter(self.manager.tx_storage)}, - # XXX: what should we expect here? I don't think we should exclude both tx2 and tx3, but maybe let the - # function using the index decide - # {tx1.hash, tx3.hash} - {tx1.hash} - ) + self.assertEqual( + {tx.hash for tx in self.manager.tx_storage.indexes.mempool_tips.iter(self.manager.tx_storage)}, + # XXX: what should we expect here? 
I don't think we should exclude both tx2 and tx3, but maybe let the + # function using the index decide + # {tx1.hash, tx3.hash} + {tx1.hash} + ) def test_tx_tips_voided(self): from hathor.wallet.base_wallet import WalletOutputInfo @@ -88,11 +86,10 @@ def test_tx_tips_voided(self): tx1.timestamp = int(self.clock.seconds()) self.manager.cpu_mining_service.resolve(tx1) self.assertTrue(self.manager.propagate_tx(tx1)) - if self.manager.tx_storage.indexes.mempool_tips is not None: - self.assertEqual( - {tx.hash for tx in self.manager.tx_storage.indexes.mempool_tips.iter(self.manager.tx_storage)}, - {tx1.hash} - ) + self.assertEqual( + {tx.hash for tx in self.manager.tx_storage.indexes.mempool_tips.iter(self.manager.tx_storage)}, + {tx1.hash} + ) tx2 = self.manager.wallet.prepare_transaction_compute_inputs(Transaction, outputs, self.manager.tx_storage) tx2.weight = 2.0 @@ -101,11 +98,10 @@ def test_tx_tips_voided(self): tx2.timestamp = int(self.clock.seconds()) + 1 self.manager.cpu_mining_service.resolve(tx2) self.assertTrue(self.manager.propagate_tx(tx2)) - if self.manager.tx_storage.indexes.mempool_tips is not None: - self.assertEqual( - {tx.hash for tx in self.manager.tx_storage.indexes.mempool_tips.iter(self.manager.tx_storage)}, - {tx2.hash} - ) + self.assertEqual( + {tx.hash for tx in self.manager.tx_storage.indexes.mempool_tips.iter(self.manager.tx_storage)}, + {tx2.hash} + ) tx3 = Transaction.create_from_struct(tx2.get_struct()) tx3.weight = 3.0 @@ -117,13 +113,12 @@ def test_tx_tips_voided(self): self.assertTrue(self.manager.propagate_tx(tx3)) # self.assertIn(tx3.hash, tx2.get_metadata().voided_by) self.assertIn(tx3.hash, tx2.get_metadata().conflict_with) - if self.manager.tx_storage.indexes.mempool_tips is not None: - self.assertEqual( - {tx.hash for tx in self.manager.tx_storage.indexes.mempool_tips.iter(self.manager.tx_storage)}, - # XXX: what should we expect here? 
I don't think we should exclude both tx2 and tx3, but maybe let the - # function using the index decide - {tx1.hash, tx3.hash} - ) + self.assertEqual( + {tx.hash for tx in self.manager.tx_storage.indexes.mempool_tips.iter(self.manager.tx_storage)}, + # XXX: what should we expect here? I don't think we should exclude both tx2 and tx3, but maybe let the + # function using the index decide + {tx1.hash, tx3.hash} + ) def test_genesis_not_in_mempool(self): mempool_txs = list(self.tx_storage.indexes.mempool_tips.iter_all(self.tx_storage)) @@ -135,8 +130,6 @@ def test_utxo_index_genesis(self): from hathor_tests.utils import GENESIS_ADDRESS_B58 HTR_UID = self._settings.HATHOR_TOKEN_UID - - assert self.tx_storage.indexes is not None utxo_index = self.tx_storage.indexes.utxo # let's check everything is alright, all UTXOs should currently be from just the mined blocks and genesis @@ -177,8 +170,6 @@ def test_utxo_index_genesis(self): def test_utxo_index_reorg(self): from hathor.indexes.utxo_index import UtxoIndexItem - - assert self.tx_storage.indexes is not None utxo_index = self.tx_storage.indexes.utxo add_new_blocks(self.manager, 5, advance_clock=15) @@ -259,7 +250,7 @@ def check_utxos(*args): block2 = self.manager.generate_mining_block(parent_block_hash=block1.parents[0], address=decode_address(address)) block2.parents[1:] = [txA2.hash, txB2.hash] - block2.timestamp = block1.timestamp + block2.timestamp = block1.timestamp + 1 block2.weight = 4 self.manager.cpu_mining_service.resolve(block2) self.manager.propagate_tx(block2) @@ -276,8 +267,6 @@ def check_utxos(*args): def test_utxo_index_simple(self): from hathor.indexes.utxo_index import UtxoIndexItem - - assert self.tx_storage.indexes is not None utxo_index = self.tx_storage.indexes.utxo address = self.get_address(0) @@ -357,8 +346,6 @@ def test_utxo_index_limits(self): from hathor.indexes.utxo_index import UtxoIndexItem _debug = False - - assert self.tx_storage.indexes is not None utxo_index = 
self.tx_storage.indexes.utxo address = self.get_address(0) @@ -437,8 +424,6 @@ def test_utxo_index_after_push_tx(self): from hathor.indexes.utxo_index import UtxoIndexItem from hathor.transaction import TxInput, TxOutput from hathor.transaction.scripts import P2PKH - - assert self.tx_storage.indexes is not None utxo_index = self.tx_storage.indexes.utxo address = self.get_address(0) @@ -509,8 +494,6 @@ def test_utxo_index_last(self): from hathor.indexes.utxo_index import UtxoIndexItem from hathor.transaction import TxInput, TxOutput from hathor.transaction.scripts import P2PKH - - assert self.tx_storage.indexes is not None utxo_index = self.tx_storage.indexes.utxo address = self.get_address(0) @@ -702,12 +685,15 @@ def setUp(self): parser = VertexParser(settings=self._settings) nc_storage_factory = NCRocksDBStorageFactory(rocksdb_storage) vertex_children_service = RocksDBVertexChildrenService(rocksdb_storage) + indexes = RocksDBIndexesManager(rocksdb_storage=rocksdb_storage, settings=self._settings) self.tx_storage = TransactionRocksDBStorage( - rocksdb_storage, + reactor=self.reactor, + rocksdb_storage=rocksdb_storage, settings=self._settings, vertex_parser=parser, nc_storage_factory=nc_storage_factory, vertex_children_service=vertex_children_service, + indexes=indexes, ) self.genesis = self.tx_storage.get_all_genesis() self.genesis_blocks = [tx for tx in self.genesis if tx.is_block] diff --git a/hathor_tests/tx/test_indexes4.py b/hathor_tests/tx/test_indexes4.py index 0548fb2c6..7d0137214 100644 --- a/hathor_tests/tx/test_indexes4.py +++ b/hathor_tests/tx/test_indexes4.py @@ -49,6 +49,7 @@ def _build_randomized_blockchain(self, *, utxo_index=False): value = 500 tx = gen_new_tx(manager, address, value) assert manager.propagate_tx(tx) + self.clock.advance(1) return manager def test_index_initialization(self): @@ -56,7 +57,6 @@ def test_index_initialization(self): # XXX: this test makes use of the internals of TipsIndex, AddressIndex and UtxoIndex tx_storage = 
self.manager.tx_storage - assert tx_storage.indexes is not None # XXX: sanity check that we've at least produced something self.assertGreater(tx_storage.get_vertices_count(), 3) diff --git a/hathor_tests/tx/test_indexes_nc_history.py b/hathor_tests/tx/test_indexes_nc_history.py index 471b9085e..97e0cf156 100644 --- a/hathor_tests/tx/test_indexes_nc_history.py +++ b/hathor_tests/tx/test_indexes_nc_history.py @@ -1,5 +1,6 @@ from hathor.conf import HathorSettings from hathor.crypto.util import get_address_b58_from_bytes +from hathor.indexes import RocksDBIndexesManager from hathor.nanocontracts import Blueprint, Context, public from hathor.nanocontracts.catalog import NCBlueprintCatalog from hathor.nanocontracts.utils import sign_openssl @@ -114,7 +115,7 @@ def test_transaction_count(self) -> None: manager = self.create_peer_from_builder(builder) assert isinstance(manager.tx_storage, TransactionRocksDBStorage) path = manager.tx_storage._rocksdb_storage.path - indexes_manager = not_none(manager.tx_storage.indexes) + indexes_manager = manager.tx_storage.indexes nc_history_index = not_none(indexes_manager.nc_history) private_key = unittest.OCB_TEST_PRIVKEY.hex() password = unittest.OCB_TEST_PASSWORD.hex() @@ -174,7 +175,7 @@ def test_transaction_count(self) -> None: # Test loading counts from existing db builder2 = self.get_builder().set_rocksdb_path(path).enable_nc_indexes() manager2 = self.create_peer_from_builder(builder2) - indexes_manager2 = not_none(manager2.tx_storage.indexes) + indexes_manager2 = manager2.tx_storage.indexes nc_history_index = not_none(indexes_manager2.nc_history) assert nc_history_index.get_transaction_count(nc1.hash) == 3 @@ -199,12 +200,15 @@ def setUp(self): vertex_parser = VertexParser(settings=self._settings) nc_storage_factory = NCRocksDBStorageFactory(rocksdb_storage) vertex_children_service = RocksDBVertexChildrenService(rocksdb_storage) + indexes = RocksDBIndexesManager(rocksdb_storage=rocksdb_storage, settings=settings) self.tx_storage 
= TransactionRocksDBStorage( - rocksdb_storage, + reactor=self.reactor, + rocksdb_storage=rocksdb_storage, settings=self._settings, vertex_parser=vertex_parser, nc_storage_factory=nc_storage_factory, vertex_children_service=vertex_children_service, + indexes=indexes, ) self.genesis = self.tx_storage.get_all_genesis() self.genesis_blocks = [tx for tx in self.genesis if tx.is_block] diff --git a/hathor_tests/tx/test_mempool_tips_index.py b/hathor_tests/tx/test_mempool_tips_index.py index fff713f1e..576e708cf 100644 --- a/hathor_tests/tx/test_mempool_tips_index.py +++ b/hathor_tests/tx/test_mempool_tips_index.py @@ -30,8 +30,6 @@ def setUp(self) -> None: self.manager = self.create_peer_from_builder(builder) self.tx_storage = self.manager.tx_storage - assert self.tx_storage.indexes is not None - assert self.tx_storage.indexes.mempool_tips is not None self.mempool_tips = self.tx_storage.indexes.mempool_tips self.dag_builder = TestDAGBuilder.from_manager(self.manager) diff --git a/hathor_tests/tx/test_multisig.py b/hathor_tests/tx/test_multisig.py index b8015db22..bff867eaf 100644 --- a/hathor_tests/tx/test_multisig.py +++ b/hathor_tests/tx/test_multisig.py @@ -6,6 +6,7 @@ from hathor.transaction import Transaction, TxInput, TxOutput from hathor.transaction.exceptions import ScriptError from hathor.transaction.scripts import P2PKH, MultiSig, create_output_script, parse_address_script, script_eval +from hathor.transaction.scripts.opcode import OpcodesVersion from hathor.wallet.base_wallet import WalletBalance, WalletOutputInfo from hathor.wallet.util import generate_multisig_address, generate_multisig_redeem_script, generate_signature from hathor_tests import unittest @@ -123,6 +124,7 @@ def test_spend_multisig(self): # Now we propagate the correct self.assertTrue(self.manager.propagate_tx(tx)) + self.clock.advance(1) self.assertEqual(self.manager.wallet.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(0, first_block_amount + 300)) @@ -135,7 +137,7 @@ def 
test_spend_multisig(self): expected_dict = {'type': 'MultiSig', 'address': self.multisig_address_b58, 'timelock': None} self.assertEqual(cls_script.to_human_readable(), expected_dict) - script_eval(tx, tx_input, tx1) + script_eval(tx, tx_input, tx1, version=OpcodesVersion.V2) # Script error with self.assertRaises(ScriptError): diff --git a/hathor_tests/tx/test_nano_contracts.py b/hathor_tests/tx/test_nano_contracts.py index 9dc195cba..00b681aaa 100644 --- a/hathor_tests/tx/test_nano_contracts.py +++ b/hathor_tests/tx/test_nano_contracts.py @@ -4,6 +4,7 @@ from hathor.transaction import Transaction, TxInput, TxOutput from hathor.transaction.scripts import P2PKH, NanoContractMatchValues, script_eval +from hathor.transaction.scripts.opcode import OpcodesVersion from hathor.util import json_dumpb from hathor_tests import unittest @@ -38,4 +39,4 @@ def test_match_values(self): txin = TxInput(b'aa', 0, input_data) spent_tx = Transaction(outputs=[TxOutput(20, script)]) tx = Transaction(outputs=[TxOutput(20, P2PKH.create_output_script(address))]) - script_eval(tx, txin, spent_tx) + script_eval(tx, txin, spent_tx, OpcodesVersion.V1) diff --git a/hathor_tests/tx/test_reward_lock.py b/hathor_tests/tx/test_reward_lock.py index 838e1a551..55f652771 100644 --- a/hathor_tests/tx/test_reward_lock.py +++ b/hathor_tests/tx/test_reward_lock.py @@ -165,6 +165,7 @@ def test_mempool_tx_invalid_after_reorg(self) -> None: assert tx_address not in balance_per_address self.assertEqual(tx.static_metadata.min_height, unlock_height) self.assertTrue(self.manager.on_new_tx(tx)) + self.clock.advance(1) balance_per_address = self.manager.wallet.get_balance_per_address(self._settings.HATHOR_TOKEN_UID) assert balance_per_address[tx_address] == 6400 diff --git a/hathor_tests/tx/test_scripts.py b/hathor_tests/tx/test_scripts.py index a72d8409a..768d3fffb 100644 --- a/hathor_tests/tx/test_scripts.py +++ b/hathor_tests/tx/test_scripts.py @@ -32,6 +32,7 @@ get_script_op, ) from 
hathor.transaction.scripts.opcode import ( + OpcodesVersion, op_checkdatasig, op_checkmultisig, op_checksig, @@ -257,7 +258,7 @@ def test_checksig(self) -> None: signature = self.genesis_private_key.sign(hashed_data, ec.ECDSA(hashes.SHA256())) pubkey_bytes = get_public_key_bytes_compressed(self.genesis_public_key) - extras = UtxoScriptExtras(tx=tx, txin=Mock(), spent_tx=Mock()) + extras = UtxoScriptExtras(tx=tx, txin=Mock(), spent_tx=Mock(), version=OpcodesVersion.V2) # wrong signature puts False (0) on stack stack: Stack = [b'aaaaaaaaa', pubkey_bytes] @@ -282,7 +283,7 @@ def test_checksig_cache(self) -> None: signature = self.genesis_private_key.sign(hashed_data, ec.ECDSA(hashes.SHA256())) pubkey_bytes = get_public_key_bytes_compressed(self.genesis_public_key) - extras = UtxoScriptExtras(tx=tx, txin=Mock(), spent_tx=Mock()) + extras = UtxoScriptExtras(tx=tx, txin=Mock(), spent_tx=Mock(), version=OpcodesVersion.V2) stack: Stack = [signature, pubkey_bytes] self.assertIsNone(tx._sighash_data_cache) @@ -512,28 +513,28 @@ def test_find_p2pkh(self) -> None: # try with just 1 output stack: Stack = [genesis_address] tx = Transaction(outputs=[TxOutput(1, out_genesis)]) - extras = UtxoScriptExtras(tx=tx, txin=txin, spent_tx=spent_tx) + extras = UtxoScriptExtras(tx=tx, txin=txin, spent_tx=spent_tx, version=OpcodesVersion.V2) op_find_p2pkh(ScriptContext(stack=stack, logs=[], extras=extras)) self.assertEqual(stack.pop(), 1) # several outputs and correct output among them stack = [genesis_address] tx = Transaction(outputs=[TxOutput(1, out1), TxOutput(1, out2), TxOutput(1, out_genesis), TxOutput(1, out3)]) - extras = UtxoScriptExtras(tx=tx, txin=txin, spent_tx=spent_tx) + extras = UtxoScriptExtras(tx=tx, txin=txin, spent_tx=spent_tx, version=OpcodesVersion.V2) op_find_p2pkh(ScriptContext(stack=stack, logs=[], extras=extras)) self.assertEqual(stack.pop(), 1) # several outputs without correct amount output stack = [genesis_address] tx = Transaction(outputs=[TxOutput(1, out1), 
TxOutput(1, out2), TxOutput(2, out_genesis), TxOutput(1, out3)]) - extras = UtxoScriptExtras(tx=tx, txin=txin, spent_tx=spent_tx) + extras = UtxoScriptExtras(tx=tx, txin=txin, spent_tx=spent_tx, version=OpcodesVersion.V2) with self.assertRaises(VerifyFailed): op_find_p2pkh(ScriptContext(stack=stack, logs=[], extras=extras)) # several outputs without correct address output stack = [genesis_address] tx = Transaction(outputs=[TxOutput(1, out1), TxOutput(1, out2), TxOutput(1, out3)]) - extras = UtxoScriptExtras(tx=tx, txin=txin, spent_tx=spent_tx) + extras = UtxoScriptExtras(tx=tx, txin=txin, spent_tx=spent_tx, version=OpcodesVersion.V2) with self.assertRaises(VerifyFailed): op_find_p2pkh(ScriptContext(stack=stack, logs=[], extras=extras)) @@ -547,7 +548,7 @@ def test_greaterthan_timestamp(self) -> None: tx = Transaction() stack: Stack = [struct.pack('!I', timestamp)] - extras = UtxoScriptExtras(tx=tx, txin=Mock(), spent_tx=Mock()) + extras = UtxoScriptExtras(tx=tx, txin=Mock(), spent_tx=Mock(), version=OpcodesVersion.V2) with self.assertRaises(TimeLocked): tx.timestamp = timestamp - 1 @@ -573,7 +574,7 @@ def test_checkmultisig(self) -> None: tx = Transaction(inputs=[txin], outputs=[txout]) data_to_sign = tx.get_sighash_all() - extras = UtxoScriptExtras(tx=tx, txin=Mock(), spent_tx=Mock()) + extras = UtxoScriptExtras(tx=tx, txin=Mock(), spent_tx=Mock(), version=OpcodesVersion.V2) wallet = HDWallet() wallet._manually_initialize() diff --git a/hathor_tests/tx/test_timelock.py b/hathor_tests/tx/test_timelock.py index 82eca54c5..faf9e8d99 100644 --- a/hathor_tests/tx/test_timelock.py +++ b/hathor_tests/tx/test_timelock.py @@ -40,6 +40,7 @@ def test_timelock(self): tx1.timestamp = int(self.clock.seconds()) self.manager.cpu_mining_service.resolve(tx1) self.manager.propagate_tx(tx1) + self.clock.advance(1) self.assertEqual(self.manager.wallet.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(500, sum(blocks_tokens) - 500)) @@ -79,6 +80,7 @@ def test_timelock(self): 
tx3.timestamp = int(self.clock.seconds()) self.manager.cpu_mining_service.resolve(tx3) propagated = self.manager.propagate_tx(tx3) + self.clock.advance(1) self.assertEqual(self.manager.wallet.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(500, sum(blocks_tokens) - 500 - 700)) self.assertTrue(propagated) @@ -99,6 +101,7 @@ def test_timelock(self): tx4.timestamp = int(self.clock.seconds()) self.manager.cpu_mining_service.resolve(tx4) propagated = self.manager.propagate_tx(tx4) + self.clock.advance(1) self.assertEqual(self.manager.wallet.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(500, sum(blocks_tokens[:3]))) self.assertTrue(propagated) @@ -107,6 +110,7 @@ def test_timelock(self): tx2.timestamp = int(self.clock.seconds()) self.manager.cpu_mining_service.resolve(tx2) propagated = self.manager.propagate_tx(tx2) + self.clock.advance(1) self.assertEqual(self.manager.wallet.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(0, sum(blocks_tokens[:3]))) self.assertTrue(propagated) diff --git a/hathor_tests/tx/test_tips.py b/hathor_tests/tx/test_tips.py index 21d2110aa..d4c3eae22 100644 --- a/hathor_tests/tx/test_tips.py +++ b/hathor_tests/tx/test_tips.py @@ -14,8 +14,6 @@ def setUp(self): self.manager = self.create_peer(self.network, unlock_wallet=True) def get_tips(self): - assert self.manager.tx_storage.indexes is not None - assert self.manager.tx_storage.indexes.mempool_tips is not None return self.manager.tx_storage.indexes.mempool_tips.get() def test_tips_back(self): diff --git a/hathor_tests/tx/test_traversal.py b/hathor_tests/tx/test_traversal.py index 6041082f2..d8a538e78 100644 --- a/hathor_tests/tx/test_traversal.py +++ b/hathor_tests/tx/test_traversal.py @@ -100,6 +100,7 @@ def _run_lr(self, walk, skip_root=True): seen.add(tx.hash) self.assertGreaterEqual(tx.timestamp, last_timestamp) last_timestamp = tx.timestamp + walk.add_neighbors() return seen def _run_rl(self, walk): @@ -109,6 +110,7 @@ def _run_rl(self, walk): seen.add(tx.hash) 
self.assertLessEqual(tx.timestamp, last_timestamp) last_timestamp = tx.timestamp + walk.add_neighbors() return seen @@ -131,6 +133,7 @@ def _run_lr(self, walk, skip_root=True): distance[tx.hash] = dist self.assertGreaterEqual(dist, last_dist) last_dist = dist + walk.add_neighbors() return seen def _run_rl(self, walk): @@ -146,6 +149,7 @@ def _run_rl(self, walk): distance[tx.hash] = dist self.assertGreaterEqual(dist, last_dist) last_dist = dist + walk.add_neighbors() return seen @@ -159,10 +163,12 @@ def _run_lr(self, walk, skip_root=True): seen = set() for tx in walk.run(self.root_tx, skip_root=skip_root): seen.add(tx.hash) + walk.add_neighbors() return seen def _run_rl(self, walk): seen = set() for tx in walk.run(self.root_tx, skip_root=True): seen.add(tx.hash) + walk.add_neighbors() return seen diff --git a/hathor_tests/tx/test_tx.py b/hathor_tests/tx/test_tx.py index 597907c58..1e537ee9a 100644 --- a/hathor_tests/tx/test_tx.py +++ b/hathor_tests/tx/test_tx.py @@ -1,7 +1,7 @@ import base64 import hashlib from math import isinf, isnan -from unittest.mock import patch +from unittest.mock import Mock, patch import pytest @@ -36,6 +36,7 @@ from hathor.transaction.scripts import P2PKH, parse_address_script from hathor.transaction.util import int_to_bytes from hathor.transaction.validation_state import ValidationState +from hathor.verification.verification_params import VerificationParams from hathor.wallet import Wallet from hathor_tests import unittest from hathor_tests.utils import ( @@ -67,6 +68,8 @@ def setUp(self): blocks = add_blocks_unlock_reward(self.manager) self.last_block = blocks[-1] + self.verification_params = VerificationParams.default_for_mempool(best_block=Mock()) + def test_input_output_match_less_htr(self): genesis_block = self.genesis_blocks[0] @@ -86,7 +89,7 @@ def test_input_output_match_less_htr(self): best_block = self.manager.tx_storage.get_best_block() block_storage = self.manager.get_nc_block_storage(best_block) with 
self.assertRaises(InputOutputMismatch): - self._verifiers.tx.verify_sum(self._settings, tx.get_complete_token_info(block_storage)) + self._verifiers.tx.verify_sum(self._settings, tx, tx.get_complete_token_info(block_storage)) def test_input_output_match_more_htr(self): genesis_block = self.genesis_blocks[0] @@ -107,7 +110,7 @@ def test_input_output_match_more_htr(self): best_block = self.manager.tx_storage.get_best_block() block_storage = self.manager.get_nc_block_storage(best_block) with self.assertRaises(InputOutputMismatch): - self._verifiers.tx.verify_sum(self._settings, tx.get_complete_token_info(block_storage)) + self._verifiers.tx.verify_sum(self._settings, tx, tx.get_complete_token_info(block_storage)) def test_validation(self): # add 100 blocks and check that walking through get_next_block_best_chain yields the same blocks @@ -147,7 +150,7 @@ def test_script(self): _input.data = data_wrong with self.assertRaises(InvalidInputData): - self._verifiers.tx.verify_inputs(tx) + self._verifiers.tx.verify_inputs(tx, params=self.verification_params) def test_too_many_inputs(self): random_bytes = bytes.fromhex('0000184e64683b966b4268f387c269915cc61f6af5329823a93e3696cb0fe902') @@ -776,10 +779,10 @@ def test_tx_methods(self): tx2.timestamp = tx2_timestamp # Verify inputs timestamps - self._verifiers.tx.verify_inputs(tx2) + self._verifiers.tx.verify_inputs(tx2, params=self.verification_params) tx2.timestamp = 2 with self.assertRaises(TimestampError): - self._verifiers.tx.verify_inputs(tx2) + self._verifiers.tx.verify_inputs(tx2, params=self.verification_params) tx2.timestamp = tx2_timestamp # Validate maximum distance between blocks @@ -993,7 +996,7 @@ def _test_txin_data_limit(self, offset): outputs=[_output], storage=self.tx_storage ) - self._verifiers.tx.verify_inputs(tx, skip_script=True) + self._verifiers.tx.verify_inputs(tx, skip_script=True, params=self.verification_params) def test_txin_data_limit_exceeded(self): with self.assertRaises(InvalidInputDataSize): 
diff --git a/hathor_tests/tx/test_tx_storage.py b/hathor_tests/tx/test_tx_storage.py index 15dff84e2..a81d34f66 100644 --- a/hathor_tests/tx/test_tx_storage.py +++ b/hathor_tests/tx/test_tx_storage.py @@ -154,7 +154,7 @@ def test_vertices_count(self): def validate_save(self, obj): self.tx_storage.save_transaction(obj) - self.tx_storage.add_to_indexes(obj) + self.tx_storage.indexes.add_to_non_critical_indexes(obj) loaded_obj1 = self.tx_storage.get_transaction(obj.hash) @@ -169,31 +169,29 @@ def validate_save(self, obj): idx_elem = (obj.timestamp, obj.hash) # Testing add and remove from cache - if self.tx_storage.indexes is not None: - self.assertIn(idx_elem, self.tx_storage.indexes.sorted_all) - if obj.is_block: - self.assertIn(idx_elem, self.tx_storage.indexes.sorted_blocks) - self.assertNotIn(idx_elem, self.tx_storage.indexes.sorted_txs) - else: - self.assertIn(idx_elem, self.tx_storage.indexes.sorted_txs) - self.assertNotIn(idx_elem, self.tx_storage.indexes.sorted_blocks) - - self.tx_storage.del_from_indexes(obj, remove_all=True) - - if self.tx_storage.indexes is not None: - self.assertNotIn(idx_elem, self.tx_storage.indexes.sorted_all) + self.assertIn(idx_elem, self.tx_storage.indexes.sorted_all) + if obj.is_block: + self.assertIn(idx_elem, self.tx_storage.indexes.sorted_blocks) self.assertNotIn(idx_elem, self.tx_storage.indexes.sorted_txs) + else: + self.assertIn(idx_elem, self.tx_storage.indexes.sorted_txs) self.assertNotIn(idx_elem, self.tx_storage.indexes.sorted_blocks) - self.tx_storage.add_to_indexes(obj) - if self.tx_storage.indexes is not None: - self.assertIn(idx_elem, self.tx_storage.indexes.sorted_all) - if obj.is_block: - self.assertIn(idx_elem, self.tx_storage.indexes.sorted_blocks) - self.assertNotIn(idx_elem, self.tx_storage.indexes.sorted_txs) - else: - self.assertIn(idx_elem, self.tx_storage.indexes.sorted_txs) - self.assertNotIn(idx_elem, self.tx_storage.indexes.sorted_blocks) + self.tx_storage.indexes.del_from_critical_indexes(obj) + 
self.tx_storage.indexes.del_from_non_critical_indexes(obj, remove_all=True) + + self.assertNotIn(idx_elem, self.tx_storage.indexes.sorted_all) + self.assertNotIn(idx_elem, self.tx_storage.indexes.sorted_txs) + self.assertNotIn(idx_elem, self.tx_storage.indexes.sorted_blocks) + + self.tx_storage.indexes.add_to_non_critical_indexes(obj) + self.assertIn(idx_elem, self.tx_storage.indexes.sorted_all) + if obj.is_block: + self.assertIn(idx_elem, self.tx_storage.indexes.sorted_blocks) + self.assertNotIn(idx_elem, self.tx_storage.indexes.sorted_txs) + else: + self.assertIn(idx_elem, self.tx_storage.indexes.sorted_txs) + self.assertNotIn(idx_elem, self.tx_storage.indexes.sorted_blocks) def test_save_block(self): self.validate_save(self.block) @@ -582,7 +580,7 @@ def _test_remove_tx_or_block(self, tx): tx_hash = tx.hash super()._test_remove_tx_or_block(tx) # XXX: make sure it was removed from the internal storage - self.assertFalse(self.tx_storage.store.transaction_exists(tx_hash)) + self.assertFalse(self.tx_storage.transaction_exists(tx_hash)) class TransactionRocksDBStorageTest(BaseTransactionStorageTest): diff --git a/hathor_tests/tx/test_verification_mempool.py b/hathor_tests/tx/test_verification_mempool.py index 4089b3811..f7019dd20 100644 --- a/hathor_tests/tx/test_verification_mempool.py +++ b/hathor_tests/tx/test_verification_mempool.py @@ -4,7 +4,7 @@ from hathor.checkpoint import Checkpoint from hathor.exception import InvalidNewTransaction -from hathor.nanocontracts import Blueprint, Context, fallback, public +from hathor.nanocontracts import NC_EXECUTION_FAIL_ID, Blueprint, Context, fallback, public from hathor.nanocontracts.exception import ( BlueprintDoesNotExist, NanoContractDoesNotExist, @@ -189,6 +189,122 @@ def test_conflict_with_confirmed_tx(self) -> None: self.manager.vertex_handler.on_new_mempool_transaction(tx3) assert isinstance(e.exception.__cause__, ConflictWithConfirmedTxError) + def test_conflict_with_confirmed_nc_fail_is_allowed(self) -> None: + 
artifacts = self.dag_builder.build_from_str(f''' + blockchain genesis b[1..32] + b10 < dummy + + tx1.nc_id = "{self.blueprint_id.hex()}" + tx1.nc_method = initialize() + + tx0.out[0] <<< tx_fail tx_ok + + tx_fail.nc_id = tx1 + tx_fail.nc_method = fail() + + tx1 <-- b30 + tx_fail <-- b31 + + b31 < tx_ok + ''') + artifacts.propagate_with(self.manager, up_to_before='tx_ok') + + b31 = artifacts.get_typed_vertex('b31', Block) + tx_fail = artifacts.get_typed_vertex('tx_fail', Transaction) + tx_ok = artifacts.get_typed_vertex('tx_ok', Transaction) + + assert tx_fail.get_metadata().first_block == b31.hash + assert tx_fail.get_metadata().nc_execution == NCExecutionState.FAILURE + assert tx_fail.get_metadata().voided_by == {tx_fail.hash, NC_EXECUTION_FAIL_ID} + + tx_ok.timestamp = int(self.manager.reactor.seconds()) + self.dag_builder._exporter._vertex_resolver(tx_ok) + + assert self.manager.vertex_handler.on_new_mempool_transaction(tx_ok) + + assert self.manager.tx_storage.transaction_exists(tx_ok.hash) + mempool_hashes = { + tx.hash for tx in self.manager.tx_storage.indexes.mempool_tips.iter_all(self.manager.tx_storage) + } + assert tx_ok.hash in mempool_hashes + + def test_mempool_tx_returns_after_reorg_with_confirmed_nc_fail_conflict(self) -> None: + artifacts = self.dag_builder.build_from_str(f''' + blockchain genesis b[1..32] + blockchain b31 a[32..32] + b10 < dummy + + tx1.nc_id = "{self.blueprint_id.hex()}" + tx1.nc_method = initialize() + + tx0.out[0] <<< tx_fail tx_ok + + tx_fail.nc_id = tx1 + tx_fail.nc_method = fail() + + tx_ok.nc_id = tx1 + tx_ok.nc_method = nop() + + tx1 <-- b30 + tx_fail <-- b31 + tx_ok <-- b32 + + b31 < tx_ok + tx_ok < a32 + a32.weight = 10 + ''') + artifacts.propagate_with(self.manager, up_to_before='tx_ok') + + b31 = artifacts.get_typed_vertex('b31', Block) + b32 = artifacts.get_typed_vertex('b32', Block) + a32 = artifacts.get_typed_vertex('a32', Block) + tx_fail = artifacts.get_typed_vertex('tx_fail', Transaction) + tx_ok = 
artifacts.get_typed_vertex('tx_ok', Transaction) + + assert tx_fail.get_metadata().first_block == b31.hash + assert tx_fail.get_metadata().nc_execution == NCExecutionState.FAILURE + assert tx_fail.get_metadata().voided_by == {tx_fail.hash, NC_EXECUTION_FAIL_ID} + + # Align reactor time with the chain so mempool timestamp checks pass. + self.clock.rightNow = b31.timestamp + 1 + + old_tx_ok_hash = tx_ok.hash + tx_ok.timestamp = int(self.manager.reactor.seconds()) + self.dag_builder._exporter._vertex_resolver(tx_ok) + if old_tx_ok_hash != tx_ok.hash: + # Keep b32 confirming tx_ok after the hash update. + b32.parents = [tx_ok.hash if h == old_tx_ok_hash else h for h in b32.parents] + # Ensure block timestamps are after tx_ok to satisfy parent timestamp checks. + b32.timestamp = tx_ok.timestamp + 1 + a32.timestamp = tx_ok.timestamp + 2 + self.dag_builder._exporter._vertex_resolver(b32) + self.dag_builder._exporter._vertex_resolver(a32) + + assert self.manager.vertex_handler.on_new_mempool_transaction(tx_ok) + + mempool_hashes = { + tx.hash for tx in self.manager.tx_storage.indexes.mempool_tips.iter_all(self.manager.tx_storage) + } + assert tx_ok.hash in mempool_hashes + + assert self.manager.vertex_handler.on_new_relayed_vertex(b32) + + tx_ok_confirmed = self.manager.tx_storage.get_transaction(tx_ok.hash) + assert tx_ok_confirmed.get_metadata().first_block == b32.hash + mempool_hashes = { + tx.hash for tx in self.manager.tx_storage.indexes.mempool_tips.iter_all(self.manager.tx_storage) + } + assert tx_ok.hash not in mempool_hashes + + assert self.manager.vertex_handler.on_new_relayed_vertex(a32) + + tx_ok_reorged = self.manager.tx_storage.get_transaction(tx_ok.hash) + assert tx_ok_reorged.get_metadata().first_block is None + mempool_hashes = { + tx.hash for tx in self.manager.tx_storage.indexes.mempool_tips.iter_all(self.manager.tx_storage) + } + assert tx_ok.hash in mempool_hashes + def test_too_many_between_conflicts(self) -> None: lines = [f'tx0.out[{i}] <<< txN tx{i 
+ 1}' for i in range(0, MAX_BETWEEN_CONFLICTS + 1)] orders = [f'tx{i + 1} < txN' for i in range(0, MAX_BETWEEN_CONFLICTS + 1)] diff --git a/hathor_tests/unittest.py b/hathor_tests/unittest.py index 2e6142635..cf4138a50 100644 --- a/hathor_tests/unittest.py +++ b/hathor_tests/unittest.py @@ -309,10 +309,10 @@ def assertTipsNotEqual(self, manager1: HathorManager, manager2: HathorManager) - This method assert that something should not match, either the tx-tips or the block-tip. """ - tips1 = not_none(not_none(manager1.tx_storage.indexes).mempool_tips).get() - tips1 |= {not_none(manager1.tx_storage.indexes).height.get_tip()} - tips2 = not_none(not_none(manager2.tx_storage.indexes).mempool_tips).get() - tips2 |= {not_none(manager2.tx_storage.indexes).height.get_tip()} + tips1 = manager1.tx_storage.indexes.mempool_tips.get() + tips1 |= {manager1.tx_storage.indexes.height.get_tip()} + tips2 = manager2.tx_storage.indexes.mempool_tips.get() + tips2 |= {manager2.tx_storage.indexes.height.get_tip()} self.assertNotEqual(tips1, tips2) def assertTipsEqualSyncV2( @@ -324,8 +324,8 @@ def assertTipsEqualSyncV2( ) -> None: # tx tips if strict_sync_v2_indexes: - tips1 = not_none(not_none(manager1.tx_storage.indexes).mempool_tips).get() - tips2 = not_none(not_none(manager2.tx_storage.indexes).mempool_tips).get() + tips1 = manager1.tx_storage.indexes.mempool_tips.get() + tips2 = manager2.tx_storage.indexes.mempool_tips.get() else: tips1 = {tx.hash for tx in manager1.tx_storage.iter_mempool_tips()} tips2 = {tx.hash for tx in manager2.tx_storage.iter_mempool_tips()} @@ -341,8 +341,8 @@ def assertTipsEqualSyncV2( self.assertEqual(s1, s2) # best block (from height index) - b1 = not_none(manager1.tx_storage.indexes).height.get_tip() - b2 = not_none(manager2.tx_storage.indexes).height.get_tip() + b1 = manager1.tx_storage.indexes.height.get_tip() + b2 = manager2.tx_storage.indexes.height.get_tip() self.assertIn(b1, s2) self.assertIn(b2, s1) diff --git a/hathor_tests/utils.py 
b/hathor_tests/utils.py index 83ef4aef2..4866987ae 100644 --- a/hathor_tests/utils.py +++ b/hathor_tests/utils.py @@ -10,7 +10,6 @@ import requests from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives.asymmetric import ec -from hathorlib.scripts import DataScript from twisted.internet.task import Clock from hathor.conf import HathorSettings @@ -29,6 +28,7 @@ from hathor.transaction.token_info import TokenVersion from hathor.transaction.util import get_deposit_token_deposit_amount from hathor.util import Random +from hathorlib.scripts import DataScript settings = HathorSettings() @@ -181,7 +181,7 @@ def add_new_tx( manager: HathorManager, address: str, value: int, - advance_clock: int | None = None, + advance_clock: int = 1, propagate: bool = True, name: str | None = None, ) -> Transaction: @@ -211,7 +211,7 @@ def add_new_tx( def add_new_transactions( manager: HathorManager, num_txs: int, - advance_clock: int | None = None, + advance_clock: int = 1, propagate: bool = True, name: str | None = None, ) -> list[Transaction]: @@ -535,9 +535,15 @@ def create_tokens(manager: 'HathorManager', address_b58: Optional[str] = None, m return tx -def create_fee_tokens(manager: 'HathorManager', address_b58: Optional[str] = None, mint_amount: int = 300, - token_name: str = 'TestFeeCoin', token_symbol: str = 'TFC', - genesis_output_amount: Optional[int] = None) -> TokenCreationTransaction: +def create_fee_tokens( + manager: 'HathorManager', + address_b58: Optional[str] = None, + mint_amount: int = 300, + token_name: str = 'TestFeeCoin', + token_symbol: str = 'TFC', + genesis_output_amount: Optional[int] = None, + propagate: bool = True, +) -> TokenCreationTransaction: """Creates a new token and propagates a tx with the following UTXOs: 0. some tokens (already mint some tokens so they can be transferred); 1. 
mint authority; @@ -622,9 +628,11 @@ def create_fee_tokens(manager: 'HathorManager', address_b58: Optional[str] = Non input_.data = P2PKH.create_input_data(public_bytes, signature) manager.cpu_mining_service.resolve(tx) - manager.propagate_tx(tx) - assert isinstance(manager.reactor, Clock) - manager.reactor.advance(8) + + if propagate: + manager.propagate_tx(tx) + assert isinstance(manager.reactor, Clock) + manager.reactor.advance(8) return tx @@ -744,6 +752,7 @@ class EventMocker: next_id: int = 0 tx_data = TxData( hash='abc', + name='tx name', nonce=123, timestamp=456, signal_bits=0, diff --git a/hathor_tests/wallet/test_wallet_hd.py b/hathor_tests/wallet/test_wallet_hd.py index 0b06205ed..60dc0d104 100644 --- a/hathor_tests/wallet/test_wallet_hd.py +++ b/hathor_tests/wallet/test_wallet_hd.py @@ -1,6 +1,9 @@ +from unittest.mock import Mock + from hathor.crypto.util import decode_address from hathor.simulator.utils import add_new_block from hathor.transaction import Transaction +from hathor.verification.verification_params import VerificationParams from hathor.wallet import HDWallet from hathor.wallet.base_wallet import WalletBalance, WalletInputInfo, WalletOutputInfo from hathor.wallet.exceptions import InsufficientFunds @@ -39,7 +42,8 @@ def test_transaction_and_balance(self): tx1 = self.wallet.prepare_transaction_compute_inputs(Transaction, [out], self.tx_storage) tx1.update_hash() verifier = self.manager.verification_service.verifiers.tx - verifier.verify_script(tx=tx1, input_tx=tx1.inputs[0], spent_tx=block) + params = VerificationParams.default_for_mempool(best_block=Mock()) + verifier.verify_script(tx=tx1, input_tx=tx1.inputs[0], spent_tx=block, params=params) tx1.storage = self.tx_storage tx1.get_metadata().validation = ValidationState.FULL self.wallet.on_new_tx(tx1) @@ -60,7 +64,7 @@ def test_transaction_and_balance(self): tx2.storage = self.tx_storage tx2.update_hash() tx2.storage = self.tx_storage - verifier.verify_script(tx=tx2, input_tx=tx2.inputs[0], 
spent_tx=tx1) + verifier.verify_script(tx=tx2, input_tx=tx2.inputs[0], spent_tx=tx1, params=params) tx2.get_metadata().validation = ValidationState.FULL tx2.init_static_metadata_from_storage(self._settings, self.tx_storage) self.tx_storage.save_transaction(tx2) diff --git a/hathorlib/.gitignore b/hathorlib/.gitignore new file mode 100644 index 000000000..5d1797180 --- /dev/null +++ b/hathorlib/.gitignore @@ -0,0 +1,6 @@ +__pycache__/ +*.py[cod] +/dist/ +/.coverage +/htmlcov/ +*~ diff --git a/hathorlib/.travis.yml b/hathorlib/.travis.yml new file mode 100644 index 000000000..38b188334 --- /dev/null +++ b/hathorlib/.travis.yml @@ -0,0 +1,14 @@ +language: python +python: + - "3.6" + +install: + - pip install poetry + - poetry install -n --no-root + +jobs: + include: + - stage: check + script: poetry run make check + - stage: tests + script: poetry run make tests diff --git a/hathorlib/LICENSE.txt b/hathorlib/LICENSE.txt new file mode 100644 index 000000000..b6a659da3 --- /dev/null +++ b/hathorlib/LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2020 Hathor Labs + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/hathorlib/Makefile b/hathorlib/Makefile new file mode 100644 index 000000000..f3681ce51 --- /dev/null +++ b/hathorlib/Makefile @@ -0,0 +1,57 @@ +py_sources = hathorlib/ $(wildcard *.py) +py_tests = tests/ $(wildcard *.py) + +.PHONY: all +all: check tests + +# testing: + +tests_lib = ./tests/ + +pytest_flags = -p no:warnings --cov-report=term --cov-report=html --cov=hathorlib +mypy_tests_flags = --warn-unused-configs --disallow-incomplete-defs --no-implicit-optional --warn-redundant-casts --strict-equality --disallow-subclassing-any --warn-return-any --disallow-untyped-decorators --show-error-codes +mypy_sources_flags = --strict --show-error-codes + +.PHONY: tests +tests: + pytest --durations=10 $(pytest_flags) --doctest-modules hathorlib --cov-fail-under=60 $(tests_lib) + +# checking: +# +.PHONY: mypy +mypy: mypy-sources mypy-tests + +.PHONY: mypy-sources +mypy-sources: $(py_sources) + mypy $(mypy_sources_flags) $^ + +.PHONY: mypy-tests +mypy-tests: $(py_tests) + mypy $(mypy_tests_flags) $^ + +.PHONY: flake8 +flake8: $(py_sources) $(py_tests) + flake8 $^ + +.PHONY: isort-check +isort-check: $(py_sources) $(py_tests) + isort --check-only $^ + +.PHONY: check +check: flake8 isort-check mypy + +# formatting: + +.PHONY: fmt +fmt: isort + +.PHONY: isort +isort: $(py_sources) $(py_tests) + isort -ac $^ + +# cleaning: + +.PHONY: clean-pyc +clean-pyc: + find hathorlib tests -name \*.pyc -delete + 
find hathorlib tests -name __pycache__ -delete diff --git a/hathorlib/README.md b/hathorlib/README.md new file mode 100644 index 000000000..5605e5748 --- /dev/null +++ b/hathorlib/README.md @@ -0,0 +1,30 @@ +hathorlib +========= + +Hathor Network base library. + +## Configuration + +To install dependencies, including optionals, run: + + poetry install -E client + +## Running the tests + +To run the tests using poetry virtualenv: + + poetry run make tests + +If you are managing virtualenvs without poetry, make sure it's activated and run: + + make tests + +## Running linters + +To run linters: + + poetry run make check + +Or without poetry venv: + + make check \ No newline at end of file diff --git a/hathorlib/RELEASING.md b/hathorlib/RELEASING.md new file mode 100644 index 000000000..719edeb75 --- /dev/null +++ b/hathorlib/RELEASING.md @@ -0,0 +1,9 @@ +### Bump version + +Change field `version` on file `pyproject.toml`. + +### Release new version + +`poetry build` + +`poetry publish` \ No newline at end of file diff --git a/hathorlib/hathorlib/__init__.py b/hathorlib/hathorlib/__init__.py new file mode 100644 index 000000000..5c9c741a7 --- /dev/null +++ b/hathorlib/hathorlib/__init__.py @@ -0,0 +1,16 @@ + +from hathorlib.base_transaction import BaseTransaction, TxInput, TxOutput, TxVersion, sum_weights +from hathorlib.block import Block +from hathorlib.token_creation_tx import TokenCreationTransaction +from hathorlib.transaction import Transaction + +__all__ = [ + 'BaseTransaction', + 'Block', + 'TokenCreationTransaction', + 'Transaction', + 'TxInput', + 'TxOutput', + 'TxVersion', + 'sum_weights', +] diff --git a/hathorlib/hathorlib/base_transaction.py b/hathorlib/hathorlib/base_transaction.py new file mode 100644 index 000000000..9865bf301 --- /dev/null +++ b/hathorlib/hathorlib/base_transaction.py @@ -0,0 +1,730 @@ +""" +Copyright (c) Hathor Labs and its affiliates. 
+ +This source code is licensed under the MIT license found in the +LICENSE file in the root directory of this source tree. +""" +import base64 +import datetime +import hashlib +from abc import ABC, abstractmethod +from enum import IntEnum +from math import isfinite, log +from struct import error as StructError, pack +from typing import Any, ClassVar, Dict, List, Optional, Tuple, Type + +from _hashlib import HASH + +from hathorlib.conf import HathorSettings +from hathorlib.exceptions import InvalidOutputValue, WeightError +from hathorlib.scripts import P2PKH, DataScript, MultiSig, parse_address_script +from hathorlib.utils import int_to_bytes, unpack, unpack_len +from hathorlib.vertex_parser import VertexParser + +settings = HathorSettings() + +MAX_NONCE = 2**32 + +MAX_OUTPUT_VALUE = 2**63 # max value (inclusive) that is possible to encode: 9223372036854775808 ~= 9.22337e+18 +_MAX_OUTPUT_VALUE_32 = 2**31 - 1 # max value (inclusive) before having to use 8 bytes: 2147483647 ~= 2.14748e+09 + +TX_HASH_SIZE = 32 # 256 bits, 32 bytes + +# H = unsigned short (2 bytes), d = double(8), f = float(4), I = unsigned int (4), +# Q = unsigned long long int (64), B = unsigned char (1 byte) + +# Signal bits (B), version (B), inputs len (B), and outputs len (B), token uids len (B). +_SIGHASH_ALL_FORMAT_STRING = '!BBBBB' + +# Weight (d), timestamp (I), and parents len (B) +_GRAPH_FORMAT_STRING = '!dIB' + +# The int value of one byte +_ONE_BYTE = 0xFF + + +def sum_weights(w1: float, w2: float) -> float: + return aux_calc_weight(w1, w2, 1) + + +def sub_weights(w1: float, w2: float) -> float: + if w1 == w2: + return 0 + return aux_calc_weight(w1, w2, -1) + + +def aux_calc_weight(w1: float, w2: float, multiplier: int) -> float: + a = max(w1, w2) + b = min(w1, w2) + if b == 0.0: + # Zero is a special acc_weight. + # We could use float('-inf'), but it is not serializable. 
+ return a + return a + log(1 + 2**(b - a) * multiplier, 2) + + +class TxVersion(IntEnum): + """Versions are sequential for blocks and transactions""" + + REGULAR_BLOCK = 0 + REGULAR_TRANSACTION = 1 + TOKEN_CREATION_TRANSACTION = 2 + MERGE_MINED_BLOCK = 3 + NANO_CONTRACT = 4 + ON_CHAIN_BLUEPRINT = 6 + + @classmethod + def _missing_(cls, value: Any) -> None: + assert isinstance(value, int), f"Value '{value}' must be an integer" + assert value <= _ONE_BYTE, f'Value {hex(value)} must not be larger than one byte' + + raise ValueError(f'Invalid version: {value}') + + def get_cls(self) -> Type['BaseTransaction']: + from hathorlib import Block, TokenCreationTransaction, Transaction + from hathorlib.nanocontracts.nanocontract import DeprecatedNanoContract + from hathorlib.nanocontracts.on_chain_blueprint import OnChainBlueprint + + cls_map: Dict[TxVersion, Type[BaseTransaction]] = { + TxVersion.REGULAR_BLOCK: Block, + TxVersion.REGULAR_TRANSACTION: Transaction, + TxVersion.TOKEN_CREATION_TRANSACTION: TokenCreationTransaction, + TxVersion.NANO_CONTRACT: DeprecatedNanoContract, + TxVersion.ON_CHAIN_BLUEPRINT: OnChainBlueprint, + } + + cls = cls_map.get(self) + + if cls is None: + raise ValueError('Invalid version.') + else: + return cls + + +class BaseTransaction(ABC): + """Hathor base transaction""" + + __slots__ = ( + 'version', 'signal_bits', 'weight', 'timestamp', 'nonce', 'inputs', 'outputs', 'parents', 'hash', 'headers' + ) + + # Even though nonce is serialized with different sizes for tx and blocks + # the same size is used for hashes to enable mining algorithm compatibility + SERIALIZATION_NONCE_SIZE: ClassVar[int] + HASH_NONCE_SIZE = 16 + HEX_BASE = 16 + + # Bits extracted from the first byte of the version field. They carry extra information that may be interpreted + # differently by each subclass of BaseTransaction. 
# Currently only the Block subclass uses it, carrying information about Feature Activation bits and also extra
# bits reserved for future use, depending on the configuration.
signal_bits: int

def __init__(self) -> None:
    """Initialize every field to its neutral value; real values are filled in by the parsers."""
    from hathorlib.headers import VertexBaseHeader
    self.nonce: int = 0
    self.timestamp: int = 0
    self.signal_bits: int = 0
    self.version: int = 0
    self.weight: float = 0
    self.inputs: List['TxInput'] = []
    self.outputs: List['TxOutput'] = []
    self.parents: List[bytes] = []
    self.hash: bytes = b''
    self.headers: list[VertexBaseHeader] = []

@property
@abstractmethod
def is_block(self) -> bool:
    """Whether this vertex is a block; implemented by subclasses."""
    raise NotImplementedError

@property
@abstractmethod
def is_transaction(self) -> bool:
    """Whether this vertex is a transaction; implemented by subclasses."""
    raise NotImplementedError

def is_nano_contract(self) -> bool:
    """Return True if this transaction is a nano contract; the base class never is."""
    return False

def has_fees(self) -> bool:
    """Return True if this transaction has fees; the base class never does."""
    return False

def _get_formatted_fields_dict(self, short: bool = True) -> Dict[str, str]:
    """Build the `field_name -> formatted_value` mapping used by __repr__ and __str__."""
    from collections import OrderedDict
    fields = OrderedDict(
        nonce='%d' % (self.nonce or 0),
        timestamp=f'{self.timestamp}',
        version=str(int(self.version)),
        weight=f'{self.weight:f}',
        hash=self.hash_hex,
    )
    if short:
        return fields
    fields.update(
        inputs=repr(self.inputs),
        outputs=repr(self.outputs),
        parents=repr([parent.hex() for parent in self.parents]),
    )
    return fields

def __repr__(self) -> str:
    fields = self._get_formatted_fields_dict(False)
    body = ', '.join(f'{name}={value}' for name, value in fields.items())
    return f'{type(self).__name__}({body})'

def __str__(self) -> str:
    fields = self._get_formatted_fields_dict()
    body = ', '.join(f'{name}={value}' for name, value in fields.items())
    return f'{type(self).__name__}({body})'

def clone(self) -> 'BaseTransaction':
    """Return an exact copy (no shared memory) rebuilt from this vertex's own serialization."""
    return self.create_from_struct(bytes(self))

def get_fields_from_struct(self, struct_bytes: bytes) -> bytes:
    """Parse the funds and graph sections common to Transactions and Blocks.

    :param struct_bytes: bytes of a serialized vertex
    :return: the remaining (unparsed) bytes of the buffer
    :raises ValueError: when the sequence of bytes is incorrect
    """
    remaining = self.get_funds_fields_from_struct(struct_bytes)
    return self.get_graph_fields_from_struct(remaining)

def get_header_from_bytes(self, buf: bytes) -> bytes:
    """Parse one header from `buf`, append it to self.headers and return the remaining bytes.

    :raises ValueError: when the vertex already holds its maximum number of headers
    """
    if len(self.headers) >= self.get_maximum_number_of_headers():
        raise ValueError('too many headers')
    # The first byte selects which header parser to use.
    header_class = VertexParser.get_header_parser(buf[:1])
    header, remaining = header_class.deserialize(self, buf)
    self.headers.append(header)
    return remaining

def get_maximum_number_of_headers(self) -> int:
    """Return how many headers a vertex of this type may carry."""
    return 2

@classmethod
@abstractmethod
def create_from_struct(cls, struct_bytes: bytes) -> 'BaseTransaction':
    """Build a vertex of type `cls` from its full serialization.

    :param struct_bytes: bytes of a serialized vertex
    :raises ValueError: when the sequence of bytes is incorrect
    """
    raise NotImplementedError
def __eq__(self, other: object) -> bool:
    """Two vertices are equal when both already have a hash and the hashes match.

    Returns False (it does not raise) when either side has no calculated hash yet.
    """
    if not isinstance(other, BaseTransaction):
        return NotImplemented
    if self.hash and other.hash:
        return self.hash == other.hash
    return False

def __bytes__(self) -> bytes:
    """Return the full serialization of the vertex."""
    return self.get_struct()

def __hash__(self) -> int:
    # Hashing is only meaningful once the vertex hash has been calculated.
    assert self.hash is not None
    return hash(self.hash)

@property
def hash_hex(self) -> str:
    """Return the stored hash as a hex string, or '' when there is no hash yet."""
    if self.hash is not None:
        return self.hash.hex()
    return ''

@property
def sum_outputs(self) -> int:
    """Sum of the values of all non-authority outputs."""
    return sum(output.value for output in self.outputs if not output.is_token_authority())

def get_target(self, override_weight: Optional[float] = None) -> int:
    """Return the proof-of-work target to be achieved in the mining process.

    :param override_weight: when given (including 0.0), used instead of self.weight
    :raises WeightError: when the effective weight is not a finite number
    """
    # BUG FIX: `override_weight or self.weight` silently ignored an explicit override
    # of 0 because 0.0 is falsy; compare against None instead. The finiteness check
    # now also covers the override (previously only self.weight was checked).
    weight = self.weight if override_weight is None else override_weight
    if not isfinite(weight):
        raise WeightError
    return int(2 ** (256 - weight) - 1)

def get_time_from_now(self, now: Optional[Any] = None) -> str:
    """Return the time difference between `now` and the vertex's timestamp.

    :return: string in the format "N days, HH:MM:SS"
    """
    if now is None:
        now = datetime.datetime.now()
    delta = now - datetime.datetime.fromtimestamp(self.timestamp)
    hours, rem = divmod(delta.seconds, 3600)
    minutes, seconds = divmod(rem, 60)
    return '{} days, {:02d}:{:02d}:{:02d}'.format(delta.days, hours, minutes, seconds)
@abstractmethod
def get_funds_fields_from_struct(self, buf: bytes) -> bytes:
    """Parse the funds section from `buf`; implemented by subclasses."""
    raise NotImplementedError

def get_graph_fields_from_struct(self, buf: bytes) -> bytes:
    """Parse the graph section (weight, timestamp and parents) shared by all vertices.

    :param buf: bytes of a serialized vertex
    :return: the remaining (unparsed) bytes
    :raises ValueError: when the sequence of bytes is incorrect
    """
    (self.weight, self.timestamp, parents_len), buf = unpack(_GRAPH_FORMAT_STRING, buf)
    for _ in range(parents_len):
        parent, buf = unpack_len(TX_HASH_SIZE, buf)  # each parent is a 256-bit hash
        self.parents.append(parent)
    return buf

@abstractmethod
def get_funds_struct(self) -> bytes:
    """Serialize the funds section; implemented by subclasses."""
    raise NotImplementedError

def get_graph_struct(self) -> bytes:
    """Serialize the graph section (weight, timestamp and parents), without the nonce."""
    parts = [pack(_GRAPH_FORMAT_STRING, self.weight, self.timestamp, len(self.parents))]
    parts.extend(self.parents)
    return b''.join(parts)

def get_headers_struct(self) -> bytes:
    """Serialize all headers, concatenated in order."""
    return b''.join(header.serialize() for header in self.headers)

def get_struct_without_nonce(self) -> bytes:
    """Serialize the funds + graph sections, without the nonce."""
    return self.get_funds_struct() + self.get_graph_struct()

def get_struct_nonce(self) -> bytes:
    """Serialize the proof-of-work section (the nonce)."""
    assert self.SERIALIZATION_NONCE_SIZE is not None
    return int_to_bytes(self.nonce, self.SERIALIZATION_NONCE_SIZE)

def get_struct(self) -> bytes:
    """Return the complete serialization: funds + graph + nonce + headers."""
    return self.get_struct_without_nonce() + self.get_struct_nonce() + self.get_headers_struct()
the complete serialization of the transaction + + :rtype: bytes + """ + struct_bytes = self.get_struct_without_nonce() + struct_bytes += self.get_struct_nonce() + struct_bytes += self.get_headers_struct() + return struct_bytes + + def verify_pow(self, override_weight: Optional[float] = None) -> bool: + """Verify proof-of-work + + :raises PowError: when the hash is equal or greater than the target + """ + assert self.hash is not None + numeric_hash = int(self.hash.hex(), self.HEX_BASE) + minimum_target = self.get_target(override_weight) + if numeric_hash >= minimum_target: + return False + return True + + def get_funds_hash(self) -> bytes: + """Return the sha256 of the funds part of the transaction + + :return: the hash of the funds data + :rtype: bytes + """ + funds_hash = hashlib.sha256() + funds_hash.update(self.get_funds_struct()) + return funds_hash.digest() + + def get_graph_and_headers_hash(self) -> bytes: + """Return the sha256 of the graph part of the transaction + its headers + + :return: the hash of the graph and headers data + :rtype: bytes + """ + h = hashlib.sha256() + h.update(self.get_graph_struct()) + h.update(self.get_headers_struct()) + return h.digest() + + def get_mining_header_without_nonce(self) -> bytes: + """Return the transaction header without the nonce + + :return: transaction header without the nonce + :rtype: bytes + """ + data = self.get_funds_hash() + self.get_graph_and_headers_hash() + assert len(data) == 64, 'the mining data should have a fixed size of 64 bytes' + return data + + def calculate_hash1(self) -> HASH: + """Return the sha256 of the transaction without including the `nonce` + + :return: A partial hash of the transaction + :rtype: :py:class:`_hashlib.HASH` + """ + calculate_hash1 = hashlib.sha256() + calculate_hash1.update(self.get_mining_header_without_nonce()) + return calculate_hash1 + + def calculate_hash2(self, part1: HASH) -> bytes: + """Return the hash of the transaction, starting from a partial hash + + The hash of 
def calculate_hash2(self, part1: HASH) -> bytes:
    """Finish the vertex hash from a partial context: sha256d over (mining header + nonce).

    :param part1: partially-fed sha256 context, usually from `calculate_hash1`
    :return: the vertex hash
    """
    part1.update(self.nonce.to_bytes(self.HASH_NONCE_SIZE, byteorder='big', signed=False))
    # SHA256D yields the digest in little-endian order; reverse the bytes to get the
    # big-endian representation.
    return hashlib.sha256(part1.digest()).digest()[::-1]

def calculate_hash(self) -> bytes:
    """Return the full vertex hash; same as `calculate_hash2(calculate_hash1())`."""
    return self.calculate_hash2(self.calculate_hash1())

def update_hash(self) -> None:
    """Recompute and store the hash of this vertex."""
    self.hash = self.calculate_hash()

def is_nft_creation_standard(self) -> bool:
    """Return True if this is an NFT creation transaction; the base class never is."""
    return False

def is_standard(self, std_max_output_script_size: int = settings.PUSHTX_MAX_OUTPUT_SCRIPT_SIZE,
                only_standard_script_type: bool = True,
                max_number_of_data_script_outputs: int = settings.MAX_DATA_SCRIPT_OUTPUTS) -> bool:
    """Return True if the transaction is standard.

    Every output must either have a standard script or be a size-valid data script,
    with at most `max_number_of_data_script_outputs` of the latter.
    """
    # TODO in the future we should have a way to know which standard validation failed:
    # we could collect error objects, or take a "raise_on_non_standard" parameter.

    # NFT creation has its own standard rules. We keep this short-circuit so that, if
    # data-script outputs ever stop being standard, NFT support is not lost by accident.
    if self.is_nft_creation_standard():
        return True

    # Data Script outputs are allowed but deliberately restricted in number, so they
    # are not a standard script type and are counted manually here.
    data_script_count = 0
    for output in self.outputs:
        if output.is_standard_script(std_max_output_script_size, only_standard_script_type):
            continue
        # Non-standard script: accept it only as a size-valid data script, and only
        # while the data-script quota has not been exhausted.
        if output.is_script_size_valid(std_max_output_script_size) and output.is_data_script():
            if data_script_count == max_number_of_data_script_outputs:
                return False
            data_script_count += 1
            continue
        return False
    return True
class TxInput:
    # XXX: used for caching on hathor.transaction.Transaction.get_spent_tx
    _tx: BaseTransaction

    def __init__(self, tx_id: bytes, index: int, data: bytes) -> None:
        """
        tx_id: hash of the transaction whose output is being spent
        index: which output of `tx_id` is being spent (1 byte)
        data: witness data that solves the output script
        """
        assert isinstance(tx_id, bytes), 'Value is %s, type %s' % (str(tx_id), type(tx_id))
        assert isinstance(index, int), 'Value is %s, type %s' % (str(index), type(index))
        assert isinstance(data, bytes), 'Value is %s, type %s' % (str(data), type(data))

        self.tx_id = tx_id
        self.index = index
        self.data = data

    def __repr__(self) -> str:
        return str(self)

    def __str__(self) -> str:
        return f'TxInput(tx_id={self.tx_id.hex()}, index={self.index})'

    def __bytes__(self) -> bytes:
        """Serialize the input: tx_id + index (1 byte) + data length (2 bytes) + data."""
        parts = [
            self.tx_id,
            int_to_bytes(self.index, 1),
            int_to_bytes(len(self.data), 2),  # data length
            self.data,
        ]
        return b''.join(parts)

    def get_sighash_bytes(self, clear_data: bool) -> bytes:
        """Serialize the input for the sighash; with `clear_data` the witness data is stripped."""
        if not clear_data:
            return bytes(self)
        buf = bytearray()
        buf += self.tx_id
        buf += int_to_bytes(self.index, 1)
        buf += int_to_bytes(0, 2)
        return bytes(buf)

    @classmethod
    def create_from_bytes(cls, buf: bytes) -> Tuple['TxInput', bytes]:
        """Deserialize one input from `buf`; return the input and the remaining bytes."""
        tx_id, buf = unpack_len(TX_HASH_SIZE, buf)
        (index, data_len), buf = unpack('!BH', buf)
        data, buf = unpack_len(data_len, buf)
        return cls(tx_id, index, data), buf

    def to_human_readable(self) -> Dict[str, Any]:
        """Return a json-serializable dict describing this input."""
        return {
            'tx_id': self.tx_id.hex(),
            'index': self.index,
            'data': base64.b64encode(self.data).decode('utf-8'),
        }


class TxOutput:

    # The first bit of token_data flags a token authority output.
    TOKEN_INDEX_MASK = 0b01111111
    TOKEN_AUTHORITY_MASK = 0b10000000

    # For authority outputs, the last bit of value grants mint and the
    # second-to-last grants melt.
    TOKEN_MINT_MASK = 0b00000001
    TOKEN_MELT_MASK = 0b00000010

    ALL_AUTHORITIES = TOKEN_MINT_MASK | TOKEN_MELT_MASK

    # Standard types for an output script.
    STANDARD_SCRIPT_TYPES = (P2PKH, MultiSig)

    def __init__(self, value: int, script: bytes, token_data: int = 0) -> None:
        """
        value: amount spent (4 bytes)
        script: output script, in bytes
        token_data: index of the token uid in the uid list
        """
        assert isinstance(value, int), 'value is %s, type %s' % (str(value), type(value))
        assert isinstance(script, bytes), 'script is %s, type %s' % (str(script), type(script))
        assert isinstance(token_data, int), 'token_data is %s, type %s' % (str(token_data), type(token_data))
        if value <= 0 or value > MAX_OUTPUT_VALUE:
            raise InvalidOutputValue

        self.value = value
        self.script = script
        self.token_data = token_data

    def __repr__(self) -> str:
        return str(self)

    def __str__(self) -> str:
        if self.is_token_authority():
            value_str = bin(self.value)
        else:
            value_str = str(self.value)
        return 'TxOutput(token_data=%s, value=%s)' % (bin(self.token_data), value_str)

    def __bytes__(self) -> bytes:
        """Serialize the output: value + token_data (1 byte) + script length (2 bytes) + script."""
        parts = [
            output_value_to_bytes(self.value),
            int_to_bytes(self.token_data, 1),
            int_to_bytes(len(self.script), 2),  # script length
            self.script,
        ]
        return b''.join(parts)

    @classmethod
    def create_from_bytes(cls, buf: bytes) -> Tuple['TxOutput', bytes]:
        """Deserialize one output from `buf`; return the output and the remaining bytes."""
        value, buf = bytes_to_output_value(buf)
        (token_data, script_len), buf = unpack('!BH', buf)
        script, buf = unpack_len(script_len, buf)
        return cls(value, script, token_data), buf

    def get_token_index(self) -> int:
        """Index of this output's token uid in the token uid list."""
        return self.token_data & self.TOKEN_INDEX_MASK

    def is_token_authority(self) -> bool:
        """Whether this is a token authority output."""
        return (self.token_data & self.TOKEN_AUTHORITY_MASK) > 0

    def can_mint_token(self) -> bool:
        """Whether this utxo carries mint authority."""
        return self.is_token_authority() and ((self.value & self.TOKEN_MINT_MASK) > 0)

    def can_melt_token(self) -> bool:
        """Whether this utxo carries melt authority."""
        return self.is_token_authority() and ((self.value & self.TOKEN_MELT_MASK) > 0)

    def to_human_readable(self) -> Dict[str, Any]:
        """Decode the script and return a json-serializable description, or {} when unknown."""
        from hathorlib.scripts import parse_address_script

        parsed = parse_address_script(self.script)
        if parsed is None:
            return {}
        ret = parsed.to_human_readable()
        ret['value'] = self.value
        ret['token_data'] = self.token_data
        return ret

    def to_json(self, *, decode_script: bool = False) -> Dict[str, Any]:
        """Return the raw json form of this output; optionally include the decoded script."""
        data: Dict[str, Any] = {
            'value': self.value,
            'token_data': self.token_data,
            'script': base64.b64encode(self.script).decode('utf-8'),
        }
        if decode_script:
            data['decoded'] = self.to_human_readable()
        return data

    def is_script_size_valid(self, max_output_script_size: int = settings.PUSHTX_MAX_OUTPUT_SCRIPT_SIZE) -> bool:
        """Return True if the script does not exceed the maximum allowed size."""
        return len(self.script) <= max_output_script_size

    def is_data_script(self) -> bool:
        """Return True if the output script is a DataScript."""
        return DataScript.parse_script(self.script) is not None

    def is_standard_script(self, std_max_output_script_size: int = settings.PUSHTX_MAX_OUTPUT_SCRIPT_SIZE,
                           only_standard_script_type: bool = True) -> bool:
        """Return True if this output has a standard script (size and, optionally, type)."""
        # First check: script size limit.
        if not self.is_script_size_valid(std_max_output_script_size):
            return False
        # Second check: script type. When non-standard types are allowed, size alone is enough.
        if only_standard_script_type:
            parsed = parse_address_script(self.script)
            if parsed is None or not isinstance(parsed, self.STANDARD_SCRIPT_TYPES):
                return False
        return True
def bytes_to_output_value(buf: bytes) -> Tuple[int, bytes]:
    """Parse an output value from `buf`; return the value and the remaining bytes.

    Values that fit in 32 bits are stored as 4 signed bytes; larger values are stored
    negated in 8 signed bytes, the sign of the first byte telling the formats apart.

    :raises InvalidOutputValue: when the buffer is too short for the announced format
    :raises ValueError: when an 8-byte encoding is used for a value that fits in 4 bytes
    """
    (high_byte,), _ = unpack('!b', buf)
    if high_byte < 0:
        value_format, sign = '!q', -1
    else:
        value_format, sign = '!i', 1
    try:
        (raw_value,), buf = unpack(value_format, buf)
    except StructError as e:
        raise InvalidOutputValue('Invalid byte struct for output') from e
    value = raw_value * sign
    assert value >= 0
    # Reject non-canonical encodings.
    # NOTE(review): a value exactly equal to _MAX_OUTPUT_VALUE_32 encoded in 8 bytes is
    # still accepted here, while the encoder below uses 4 bytes for it — confirm whether
    # the comparison should be `<=` instead of `<`.
    if value < _MAX_OUTPUT_VALUE_32 and high_byte < 0:
        raise ValueError('Value fits in 4 bytes but is using 8 bytes')
    return value, buf


def output_value_to_bytes(number: int) -> bytes:
    """Serialize an output value: 4 signed bytes for small values, 8 negated bytes for large ones.

    :raises InvalidOutputValue: when `number` is not positive
    """
    if number <= 0:
        raise InvalidOutputValue('Invalid value for output')
    if number > _MAX_OUTPUT_VALUE_32:
        return (-number).to_bytes(8, byteorder='big', signed=True)
    return number.to_bytes(4, byteorder='big', signed=True)  # `signed` makes no difference, but oh well


def tx_or_block_from_bytes(data: bytes) -> BaseTransaction:
    """Deserialize `data` into the proper BaseTransaction subclass (block or transaction).

    :raises StructError: when the bytes do not map to a known vertex type
    """
    # The version field occupies only the second byte of the serialization.
    version = data[1]
    try:
        vertex_cls = TxVersion(version).get_cls()
        return vertex_cls.create_from_struct(data)
    except ValueError:
        raise StructError('Invalid bytes to create transaction subclass.')
+""" + +from struct import pack +from typing import Dict + +from hathorlib.base_transaction import BaseTransaction, TxOutput +from hathorlib.utils import int_to_bytes, unpack, unpack_len + +# Signal bits (B), version (B), outputs len (B) +_FUNDS_FORMAT_STRING = '!BBB' + +# Signal bits (B), version (B), inputs len (B) and outputs len (B) +_SIGHASH_ALL_FORMAT_STRING = '!BBBB' + + +class Block(BaseTransaction): + SERIALIZATION_NONCE_SIZE = 16 + + @property + def is_block(self) -> bool: + """Returns true if this is a block""" + return True + + @property + def is_transaction(self) -> bool: + """Returns true if this is a transaction""" + return False + + def _get_formatted_fields_dict(self, short: bool = True) -> Dict[str, str]: + d = super()._get_formatted_fields_dict(short) + if not short: + d.update(data=self.data.hex()) + return d + + @classmethod + def create_from_struct(cls, struct_bytes: bytes) -> 'Block': + blc = cls() + buf = blc.get_fields_from_struct(struct_bytes) + + if len(buf) < cls.SERIALIZATION_NONCE_SIZE: + raise ValueError('Invalid sequence of bytes') + + blc.nonce = int.from_bytes(buf[:cls.SERIALIZATION_NONCE_SIZE], byteorder='big') + buf = buf[cls.SERIALIZATION_NONCE_SIZE:] + + while buf: + buf = blc.get_header_from_bytes(buf) + + blc.hash = blc.calculate_hash() + + return blc + + def get_funds_fields_from_struct(self, buf: bytes) -> bytes: + """ Gets all funds fields for a block from a buffer. + + :param buf: Bytes of a serialized block + :type buf: bytes + + :return: A buffer containing the remaining struct bytes + :rtype: bytes + + :raises ValueError: when the sequence of bytes is incorect + """ + (self.signal_bits, self.version, outputs_len), buf = unpack(_FUNDS_FORMAT_STRING, buf) + + for _ in range(outputs_len): + txout, buf = TxOutput.create_from_bytes(buf) + self.outputs.append(txout) + + return buf + + def get_graph_fields_from_struct(self, buf: bytes) -> bytes: + """ Gets graph fields for a block from a buffer. 
+ + :param buf: Bytes of a serialized transaction + :type buf: bytes + + :return: A buffer containing the remaining struct bytes + :rtype: bytes + + :raises ValueError: when the sequence of bytes is incorect + """ + buf = super().get_graph_fields_from_struct(buf) + (data_bytes,), buf = unpack('!B', buf) + self.data, buf = unpack_len(data_bytes, buf) + return buf + + def get_funds_struct(self) -> bytes: + """Return the funds data serialization of the block + + :return: funds data serialization of the block + :rtype: bytes + """ + struct_bytes = pack(_FUNDS_FORMAT_STRING, self.signal_bits, self.version, len(self.outputs)) + + for tx_output in self.outputs: + struct_bytes += bytes(tx_output) + + return struct_bytes + + def get_graph_struct(self) -> bytes: + """Return the graph data serialization of the block, without including the nonce field + + :return: graph data serialization of the transaction + :rtype: bytes + """ + struct_bytes_without_data = super().get_graph_struct() + data_bytes = int_to_bytes(len(self.data), 1) + return struct_bytes_without_data + data_bytes + self.data diff --git a/hathorlib/hathorlib/client.py b/hathorlib/hathorlib/client.py new file mode 100644 index 000000000..b3b4afacf --- /dev/null +++ b/hathorlib/hathorlib/client.py @@ -0,0 +1,190 @@ +# Copyright (c) Hathor Labs and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
# Copyright (c) Hathor Labs and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import json
import re
from typing import Any, Dict, List, NamedTuple, Optional, cast
from urllib.parse import urljoin

from hathorlib.base_transaction import tx_or_block_from_bytes
from hathorlib.exceptions import PushTxFailed

try:
    from aiohttp import ClientSession
    from structlog import get_logger
except ImportError as e:
    raise ImportError('Missing dependency, please install extras: hathorlib[client]') from e

from hathorlib import Block, TxOutput

REQUIRED_HATHOR_API_VERSION = 'v1a'

logger = get_logger()


# This regex was copied from https://semver.org/#is-there-a-suggested-regular-expression-regex-to-check-a-semver-string
# (group for build metadata renamed to `metadata` to match HathorVersion).
semver_pattern = (
    r"(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)"
    r"(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?"
    r"(?:\+(?P<metadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?"
)
semver_re = re.compile(semver_pattern)


class BlockTemplate(NamedTuple):
    """Block template."""

    data: bytes
    height: int

    def to_dict(self) -> Dict[str, Any]:
        """Return dict for json serialization."""
        return {
            'data': self.data.hex(),
            'height': self.height,
        }


class HathorVersion(NamedTuple):
    """Hathor backend version."""

    major: int
    minor: int
    patch: int
    prerelease: Optional[str] = None
    metadata: Optional[str] = None


class HathorClient:
    """Used to communicate with Hathor's full-node."""

    USER_AGENT = 'tx-mining-service'

    def __init__(self, server_url: str, api_version: str = REQUIRED_HATHOR_API_VERSION):
        """Init HathorClient with a Hathor's full-node backend."""
        self.log = logger.new()
        self._base_url = urljoin(server_url, api_version).rstrip('/') + '/'
        self._base_headers = {
            'User-Agent': self.USER_AGENT,
        }
        self._session: Optional[ClientSession] = None

    async def start(self) -> None:
        """Start a session with the backend."""
        self._session = ClientSession(headers=self._base_headers)

    async def stop(self) -> None:
        """Stop a session with the backend."""
        if self._session is not None:
            await self._session.close()
            self._session = None

    def _get_url(self, url: str) -> str:
        """Resolve `url` against the versioned base URL."""
        return urljoin(self._base_url, url.lstrip('/'))

    async def version(self) -> HathorVersion:
        """Return the version of the backend.

        :raises RuntimeError: when the reported version is not valid semver
        """
        assert self._session is not None

        async with self._session.get(self._get_url('version')) as resp:
            data = await resp.json()
            version = data['version']

        match = semver_re.match(version)
        if match is None:
            raise RuntimeError(f'Cannot parse version {version}')

        result = match.groupdict()
        return HathorVersion(
            major=int(result['major']),
            minor=int(result['minor']),
            patch=int(result['patch']),
            prerelease=result.get('prerelease'),
            metadata=result.get('metadata'),
        )

    async def health(self) -> Dict[str, Any]:
        """Return the health information of the backend.

        :raises RuntimeError: when the response body is not valid JSON
        """
        assert self._session is not None

        async with self._session.get(self._get_url('health')) as resp:
            data = await resp.text()
        try:
            parsed_json: Dict[str, Any] = json.loads(data)
        except json.JSONDecodeError as e:
            raise RuntimeError('Cannot parse health response: {}'.format(data)) from e
        return parsed_json

    async def get_block_template(self, address: Optional[str] = None) -> BlockTemplate:
        """Return a block template ready for mining.

        :raises RuntimeError: when the backend refuses or fails to provide a template
        """
        assert self._session is not None
        params: Dict[str, str] = {}
        if address is not None:
            params['address'] = address

        # BUG FIX: the response was previously never released (no `async with`), leaking
        # the connection back-pressure; enter the context so it is released on all paths.
        async with self._session.get(self._get_url('get_block_template'), params=params) as resp:
            if resp.status != 200:
                self.log.error('Error getting block template', status=resp.status)
                raise RuntimeError('Cannot get block template (status {})'.format(resp.status))
            data = await resp.json()

        if data.get('error'):
            self.log.error('Error getting block template', data=data)
            raise RuntimeError('Cannot get block template')

        # Get height.
        metadata = data.get('metadata', {})
        height = metadata['height']

        # Build block.
        blk = Block()
        blk.signal_bits = data['signal_bits']
        blk.version = 0
        blk.timestamp = data['timestamp']
        blk.weight = data['weight']
        blk.parents = [bytes.fromhex(x) for x in data['parents']]
        blk.data = b''

        do = data['outputs'][0]
        txout = TxOutput(
            value=do['value'],
            token_data=0,
            script=b'',
        )
        blk.outputs = [txout]
        return BlockTemplate(data=bytes(blk), height=height)

    async def get_tx_parents(self) -> List[bytes]:
        """Return parents for a new transaction.

        :raises RuntimeError: when the backend reports failure
        """
        assert self._session is not None
        async with self._session.get(self._get_url('tx_parents')) as resp:
            data = await resp.json()
        if not data.get('success'):
            raise RuntimeError('Cannot get tx parents')
        return [bytes.fromhex(x) for x in data['tx_parents']]

    async def push_tx_or_block(self, raw: bytes) -> bool:
        """Push a new tx or block to the backend.

        :raises PushTxFailed: when the backend rejects the submission
        """
        assert self._session is not None

        tx = tx_or_block_from_bytes(raw)
        if tx.is_block:
            endpoint, payload = 'submit_block', {'hexdata': raw.hex()}
        else:
            endpoint, payload = 'push_tx', {'hex_tx': raw.hex()}

        # BUG FIX: the response was never released, and the parsed body was stored in a
        # local named `json`, shadowing the imported json module for the rest of the scope.
        async with self._session.post(self._get_url(endpoint), json=payload) as resp:
            if resp.status > 299:
                response = await resp.text()
                self.log.error('Error pushing tx or block', response=response, status=resp.status)
                raise PushTxFailed('Cannot push tx or block')
            resp_json = await resp.json()

        return cast(bool, resp_json['result'])


# ---- hathorlib/conf/__init__.py ----
from hathorlib.conf.get_settings import HathorSettings

__all__ = [
    'HathorSettings',
]
import importlib
import os

from hathorlib.conf.settings import HathorSettings as Settings

_config_file = None


def HathorSettings() -> Settings:
    """Return the configuration namedtuple for the selected network.

    The module path is read from the TXMINING_CONFIG_FILE environment variable and
    defaults to the mainnet configuration.

    :raises Exception: when called again with a different configuration file
    """
    global _config_file
    default_file = 'hathorlib.conf.mainnet'
    config_file = os.environ.get('TXMINING_CONFIG_FILE', default_file)
    if _config_file is None:
        _config_file = config_file
    elif _config_file != config_file:
        raise Exception('loading config twice with a different file')
    try:
        module = importlib.import_module(config_file)
    except ModuleNotFoundError:
        # NOTE(review): a typo in TXMINING_CONFIG_FILE silently falls back to mainnet
        # here — confirm this is intended rather than failing loudly.
        module = importlib.import_module(default_file)
    return module.SETTINGS  # type: ignore


# ---- hathorlib/conf/mainnet.py ----
"""
Copyright (c) Hathor Labs and its affiliates.

This source code is licensed under the MIT license found in the
LICENSE file in the root directory of this source tree.
"""

from hathorlib.conf.settings import HathorSettings

SETTINGS = HathorSettings(
    P2PKH_VERSION_BYTE=b'\x28',
    MULTISIG_VERSION_BYTE=b'\x64',
    NETWORK_NAME='mainnet',
)


# ---- hathorlib/conf/settings.py ----
"""
Copyright (c) Hathor Labs and its affiliates.

This source code is licensed under the MIT license found in the
LICENSE file in the root directory of this source tree.
"""

from typing import NamedTuple


class HathorSettings(NamedTuple):
    # Name of the network: "mainnet", "testnet-alpha", "testnet-bravo", ...
    NETWORK_NAME: str

    # Version byte of the address in P2PKH
    P2PKH_VERSION_BYTE: bytes

    # Version byte of the address in MultiSig
    MULTISIG_VERSION_BYTE: bytes

    # HTR Token UID
    HATHOR_TOKEN_UID: bytes = b'\x00'

    # Maximum number of characters in a token name
    MAX_LENGTH_TOKEN_NAME: int = 30

    # Maximum number of characters in a token symbol
    MAX_LENGTH_TOKEN_SYMBOL: int = 5

    # Name of the Hathor token
    HATHOR_TOKEN_NAME: str = 'Hathor'

    # Symbol of the Hathor token
    HATHOR_TOKEN_SYMBOL: str = 'HTR'

    # Number of decimal places for the Hathor token
    DECIMAL_PLACES: int = 2

    # Minimum weight of a tx
    MIN_TX_WEIGHT: int = 14

    # Multiplier coefficient to adjust the minimum weight of a normal tx to 18
    MIN_TX_WEIGHT_COEFFICIENT: float = 1.6

    # Amount at which the tx min weight reaches the middle point between the minimum and maximum weight
    MIN_TX_WEIGHT_K: int = 100

    # Maximum size of the tx output's script allowed for a tx to be standard
    PUSHTX_MAX_OUTPUT_SCRIPT_SIZE: int = 256

    # Maximum number of tx outputs of Data Script type
    MAX_DATA_SCRIPT_OUTPUTS: int = 25

    # Max length in bytes allowed for on-chain blueprint code after decompression, 240KB (not KiB)
    NC_ON_CHAIN_BLUEPRINT_CODE_MAX_SIZE_UNCOMPRESSED: int = 240_000

    # Max length in bytes allowed for on-chain blueprint code inside the transaction, 24KB (not KiB)
    NC_ON_CHAIN_BLUEPRINT_CODE_MAX_SIZE_COMPRESSED: int = 24_000
+""" + +from hathorlib.conf.settings import HathorSettings + +SETTINGS = HathorSettings( + P2PKH_VERSION_BYTE=b'\x49', + MULTISIG_VERSION_BYTE=b'\x87', + NETWORK_NAME='testnet', +) diff --git a/hathorlib/hathorlib/daa.py b/hathorlib/hathorlib/daa.py new file mode 100644 index 000000000..186b75e73 --- /dev/null +++ b/hathorlib/hathorlib/daa.py @@ -0,0 +1,47 @@ +# Copyright (c) Hathor Labs and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +from math import log +from typing import TYPE_CHECKING + +from hathorlib.conf import HathorSettings + +if TYPE_CHECKING: + from hathorlib import Transaction + +settings = HathorSettings() + + +def minimum_tx_weight(tx: 'Transaction', *, fix_parents: bool = True) -> float: + """Return the minimum weight for the tx. + + The minimum is calculated by the following function: + + w = alpha * log(size, 2) + 4.0 + 4.0 + ---------------- + 1 + k / amount + """ + tx_size = len(tx.get_struct()) + + # When a transaction is still being create, it might not have its parents yet. + # In this case, the parents will be added later but we need to take their size + # into consideration to calculate the weight. + if fix_parents and len(tx.parents) < 2: + tx_size += 32 * (2 - len(tx.parents)) + + # We need to take into consideration the decimal places because it is inside the amount. + # For instance, if one wants to transfer 20 HTRs, the amount will be 2000. 
+ # Max below is preventing division by 0 when handling authority methods that have no outputs + amount = max(1, tx.sum_outputs) / (10 ** settings.DECIMAL_PLACES) + + weight: float = ( + + settings.MIN_TX_WEIGHT_COEFFICIENT * log(tx_size, 2) + + 4 / (1 + settings.MIN_TX_WEIGHT_K / amount) + 4 + ) + + # Make sure the calculated weight is at least the minimum + weight = max(weight, settings.MIN_TX_WEIGHT) + + return weight diff --git a/hathorlib/hathorlib/exceptions.py b/hathorlib/hathorlib/exceptions.py new file mode 100644 index 000000000..7c525de10 --- /dev/null +++ b/hathorlib/hathorlib/exceptions.py @@ -0,0 +1,172 @@ +""" +Copyright 2019 Hathor Labs + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+""" + + +class HathorError(Exception): + """General error class""" + + +class InvalidAddress(HathorError): + """Address is invalid""" + + +class TxValidationError(HathorError): + """Base class for tx validation errors""" + + +class HathorClientError(HathorError): + """Base class for errors when communicating with the fullnode""" + + +class ParentDoesNotExist(TxValidationError): + """A parent does not exist""" + + +class IncorrectParents(TxValidationError): + """Wrong number of parents or confirming incorrect types of transactions: + - block: 3 parents: 1 block, 2 transactions + - tx: 2 parents, both transactions + """ + + +class TimestampError(TxValidationError): + """Transaction timestamp is smaller or equal to one parent's timestamp""" + + +class DoubleSpend(TxValidationError): + """Some input has already been spent""" + + +class InputOutputMismatch(TxValidationError): + """Input and output amounts are not equal""" + + +class InvalidInputData(TxValidationError): + """Input data does not solve output script correctly""" + + +class NoInputError(TxValidationError): + """There is not input""" + + +class TooManyInputs(TxValidationError): + """More than 256 inputs""" + + +class InexistentInput(TxValidationError): + """Input tx does not exist or index spent does not exist""" + + +class ConflictingInputs(TxValidationError): + """Inputs in the tx are spending the same output""" + + +class TooManyOutputs(TxValidationError): + """More than 256 outputs""" + + +class InvalidOutputValue(TxValidationError): + """Value of output is invalid""" + + +class PowError(TxValidationError): + """Proof-of-work is not correct""" + + +class AuxPowError(PowError): + """Auxiliary Proof-of-work is not correct""" + + +class WeightError(TxValidationError): + """Transaction not using correct weight""" + + +class DuplicatedParents(TxValidationError): + """Transaction has duplicated parents""" + + +class InvalidToken(TxValidationError): + """Token is not valid""" + + +class 
BlockError(TxValidationError): + """Base class for Block-specific errors""" + + +class TransactionDataError(TxValidationError): + """Block data max length exceeded""" + + +class RewardLocked(TxValidationError): + """Block reward cannot be spent yet, needs more confirmations""" + + +class BlockWithInputs(BlockError): + """Block has inputs""" + + +class BlockWithTokensError(BlockError): + """Block has tokens other than hathor""" + + +class ScriptError(HathorError): + """Base class for script evaluation errors""" + + +class OutOfData(ScriptError): + """PUSHDATA operation with more bytes than we have available""" + + +class MissingStackItems(ScriptError): + """Operation requires more items than what is on stack""" + + +class EqualVerifyFailed(ScriptError): + """OP_EQUALVERIFY failed""" + + +class FinalStackInvalid(ScriptError): + """Value left on stack is not true""" + + +class OracleChecksigFailed(ScriptError): + """Signature, public key and data don't match. Used mostly with nano contracts""" + + +class DataIndexError(ScriptError): + """The value for data at the given index does not exist. + + For example, if the data is of form 'value1:value2:value3' and we try to access value at index 5. 
+ """ + + +class InvalidStackData(ScriptError): + """The value for data on the stack is not what we expect + + For example, we expect an integer but it's not + """ + + +class VerifyFailed(ScriptError): + """For all cases when there's a comparison that fails""" + + +class TimeLocked(ScriptError): + """Transaction is invalid because it is time locked""" + + +class PushTxFailed(HathorClientError): + """An attempt to push a tx/block to the fullnode failed""" diff --git a/hathorlib/hathorlib/headers/__init__.py b/hathorlib/hathorlib/headers/__init__.py new file mode 100644 index 000000000..c39de7ed1 --- /dev/null +++ b/hathorlib/hathorlib/headers/__init__.py @@ -0,0 +1,30 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from hathorlib.headers.base import VertexBaseHeader +from hathorlib.headers.deprecated_nano_header import DeprecatedNanoHeader +from hathorlib.headers.fee_header import FeeEntry, FeeHeader, FeeHeaderEntry +from hathorlib.headers.nano_header import NC_INITIALIZE_METHOD, NanoHeader +from hathorlib.headers.types import VertexHeaderId + +__all__ = [ + 'VertexBaseHeader', + 'VertexHeaderId', + 'NanoHeader', + 'DeprecatedNanoHeader', + 'FeeHeader', + 'FeeHeaderEntry', + 'FeeEntry', + 'NC_INITIALIZE_METHOD', +] diff --git a/hathorlib/hathorlib/headers/base.py b/hathorlib/hathorlib/headers/base.py new file mode 100644 index 000000000..e958ac473 --- /dev/null +++ b/hathorlib/hathorlib/headers/base.py @@ -0,0 +1,39 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import annotations + +from abc import ABC, abstractmethod +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from hathorlib.base_transaction import BaseTransaction + + +class VertexBaseHeader(ABC): + @classmethod + @abstractmethod + def deserialize(cls, tx: BaseTransaction, buf: bytes) -> tuple[VertexBaseHeader, bytes]: + """Deserialize header from `buf` which starts with header id.""" + raise NotImplementedError + + @abstractmethod + def serialize(self) -> bytes: + """Serialize header with header id as prefix.""" + raise NotImplementedError + + @abstractmethod + def get_sighash_bytes(self) -> bytes: + """Return sighash bytes to check digital signatures.""" + raise NotImplementedError diff --git a/hathorlib/hathorlib/headers/deprecated_nano_header.py b/hathorlib/hathorlib/headers/deprecated_nano_header.py new file mode 100644 index 000000000..5ad294d06 --- /dev/null +++ b/hathorlib/hathorlib/headers/deprecated_nano_header.py @@ -0,0 +1,129 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import annotations + +from collections import deque +from dataclasses import dataclass +from typing import TYPE_CHECKING + +from hathorlib.headers.base import VertexBaseHeader +from hathorlib.headers.types import VertexHeaderId +from hathorlib.utils import int_to_bytes, unpack, unpack_len + +if TYPE_CHECKING: + from hathorlib.base_transaction import BaseTransaction + from hathorlib.headers.nano_header import NanoHeader, NanoHeaderAction + + +NC_VERSION = 1 + + +@dataclass(frozen=True) +class DeprecatedNanoHeader(VertexBaseHeader): + tx: BaseTransaction + + # nc_id equals to the blueprint_id when a Nano Contract is being created. + # nc_id equals to the nanocontract_id when a method is being called. + nc_id: bytes + + # Name of the method to be called. When creating a new Nano Contract, it must be equal to 'initialize'. + nc_method: str + + # Serialized arguments to nc_method. + nc_args_bytes: bytes + + nc_actions: list[NanoHeaderAction] + + # Pubkey and signature of the transaction owner / caller. 
+ nc_pubkey: bytes + nc_signature: bytes + + nc_version: int = NC_VERSION + + @classmethod + def deserialize(cls, tx: BaseTransaction, buf: bytes) -> tuple[DeprecatedNanoHeader, bytes]: + header_id, buf = buf[:1], buf[1:] + assert header_id == VertexHeaderId.NANO_HEADER.value + (nc_version,), buf = unpack('!B', buf) + if nc_version != NC_VERSION: + raise ValueError('unknown nanocontract version: {}'.format(nc_version)) + + nc_id, buf = unpack_len(32, buf) + (nc_method_len,), buf = unpack('!B', buf) + nc_method, buf = unpack_len(nc_method_len, buf) + (nc_args_bytes_len,), buf = unpack('!H', buf) + nc_args_bytes, buf = unpack_len(nc_args_bytes_len, buf) + + nc_actions: list[NanoHeaderAction] = [] + from hathorlib.nanocontracts import DeprecatedNanoContract + if not isinstance(tx, DeprecatedNanoContract): + (nc_actions_len,), buf = unpack('!B', buf) + for _ in range(nc_actions_len): + action, buf = NanoHeader._deserialize_action(buf) + nc_actions.append(action) + + (nc_pubkey_len,), buf = unpack('!B', buf) + nc_pubkey, buf = unpack_len(nc_pubkey_len, buf) + (nc_signature_len,), buf = unpack('!B', buf) + nc_signature, buf = unpack_len(nc_signature_len, buf) + + decoded_nc_method = nc_method.decode('ascii') + + return cls( + tx=tx, + nc_version=nc_version, + nc_id=nc_id, + nc_method=decoded_nc_method, + nc_args_bytes=nc_args_bytes, + nc_actions=nc_actions, + nc_pubkey=nc_pubkey, + nc_signature=nc_signature, + ), bytes(buf) + + def _serialize_without_header_id(self, *, skip_signature: bool) -> deque[bytes]: + """Serialize the header with the option to skip the signature.""" + encoded_method = self.nc_method.encode('ascii') + + ret: deque[bytes] = deque() + ret.append(int_to_bytes(NC_VERSION, 1)) + ret.append(self.nc_id) + ret.append(int_to_bytes(len(encoded_method), 1)) + ret.append(encoded_method) + ret.append(int_to_bytes(len(self.nc_args_bytes), 2)) + ret.append(self.nc_args_bytes) + + from hathorlib.nanocontracts import DeprecatedNanoContract + if not 
isinstance(self.tx, DeprecatedNanoContract): + ret.append(int_to_bytes(len(self.nc_actions), 1)) + for action in self.nc_actions: + ret.append(NanoHeader._serialize_action(action)) + + ret.append(int_to_bytes(len(self.nc_pubkey), 1)) + ret.append(self.nc_pubkey) + if not skip_signature: + ret.append(int_to_bytes(len(self.nc_signature), 1)) + ret.append(self.nc_signature) + else: + ret.append(int_to_bytes(0, 1)) + return ret + + def serialize(self) -> bytes: + ret = self._serialize_without_header_id(skip_signature=False) + ret.appendleft(VertexHeaderId.NANO_HEADER.value) + return b''.join(ret) + + def get_sighash_bytes(self) -> bytes: + ret = self._serialize_without_header_id(skip_signature=True) + return b''.join(ret) diff --git a/hathorlib/hathorlib/headers/fee_header.py b/hathorlib/hathorlib/headers/fee_header.py new file mode 100644 index 000000000..6666a4cbb --- /dev/null +++ b/hathorlib/hathorlib/headers/fee_header.py @@ -0,0 +1,94 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import annotations + +from dataclasses import dataclass +from typing import TYPE_CHECKING + +from hathorlib.headers.base import VertexBaseHeader +from hathorlib.headers.types import VertexHeaderId +from hathorlib.utils import int_to_bytes, unpack + +if TYPE_CHECKING: + from hathorlib.base_transaction import BaseTransaction + from hathorlib.transaction import Transaction + + +@dataclass(frozen=True) +class FeeHeaderEntry: + token_index: int + amount: int + + +@dataclass(frozen=True) +class FeeEntry: + token_uid: bytes + amount: int + + +@dataclass(frozen=True) +class FeeHeader(VertexBaseHeader): + tx: Transaction + fees: list[FeeHeaderEntry] + + @classmethod + def deserialize(cls, tx: BaseTransaction, buf: bytes) -> tuple[FeeHeader, bytes]: + from hathorlib.base_transaction import bytes_to_output_value + + header_id, buf = buf[:1], buf[1:] + assert header_id == VertexHeaderId.FEE_HEADER.value + + fees: list[FeeHeaderEntry] = [] + (fees_len,), buf = unpack('!B', buf) + + for _ in range(fees_len): + (token_index,), buf = unpack('!B', buf) + amount, buf = bytes_to_output_value(buf) + fees.append(FeeHeaderEntry( + token_index=token_index, + amount=amount, + )) + from hathorlib.transaction import Transaction + assert isinstance(tx, Transaction) + return cls( + tx=tx, + fees=fees, + ), bytes(buf) + + def serialize(self) -> bytes: + from hathorlib.base_transaction import output_value_to_bytes + + ret = [ + VertexHeaderId.FEE_HEADER.value, + int_to_bytes(len(self.fees), 1) + ] + + for fee in self.fees: + ret.append(int_to_bytes(fee.token_index, 1)) + ret.append(output_value_to_bytes(fee.amount)) + + return b''.join(ret) + + def get_sighash_bytes(self) -> bytes: + return self.serialize() + + def get_fees(self) -> list[FeeEntry]: + return [ + FeeEntry( + token_uid=self.tx.get_token_uid(fee.token_index), + amount=fee.amount + ) + for fee in self.fees + ] diff --git a/hathorlib/hathorlib/headers/nano_header.py b/hathorlib/hathorlib/headers/nano_header.py new 
file mode 100644 index 000000000..d2ed9ecc2 --- /dev/null +++ b/hathorlib/hathorlib/headers/nano_header.py @@ -0,0 +1,162 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +from collections import deque +from dataclasses import dataclass +from typing import TYPE_CHECKING + +from hathorlib.headers.base import VertexBaseHeader +from hathorlib.headers.types import VertexHeaderId +from hathorlib.utils import decode_unsigned, encode_unsigned, int_to_bytes, unpack, unpack_len + +if TYPE_CHECKING: + from hathorlib.base_transaction import BaseTransaction + from hathorlib.nanocontracts.types import NCActionType + +NC_INITIALIZE_METHOD = 'initialize' +ADDRESS_LEN_BYTES = 25 +ADDRESS_SEQNUM_SIZE: int = 8 # bytes +_NC_SCRIPT_LEN_MAX_BYTES: int = 2 + + +@dataclass(frozen=True) +class NanoHeaderAction: + type: 'NCActionType' + token_index: int + amount: int + + +@dataclass(frozen=True) +class NanoHeader(VertexBaseHeader): + tx: BaseTransaction + + # Sequence number for the caller. + nc_seqnum: int + + # nc_id equals to the blueprint_id when a Nano Contract is being created. + # nc_id equals to the nanocontract_id when a method is being called. + nc_id: bytes + + # Name of the method to be called. When creating a new Nano Contract, it must be equal to 'initialize'. + nc_method: str + + # Serialized arguments to nc_method. 
+ nc_args_bytes: bytes + + nc_actions: list[NanoHeaderAction] + + # Address and script with signature(s) of the transaction owner(s)/caller(s). Supports P2PKH and P2SH. + nc_address: bytes + nc_script: bytes + + @classmethod + def _deserialize_action(cls, buf: bytes) -> tuple[NanoHeaderAction, bytes]: + from hathorlib.base_transaction import bytes_to_output_value + from hathorlib.nanocontracts.types import NCActionType + + type_bytes, buf = buf[:1], buf[1:] + action_type = NCActionType.from_bytes(type_bytes) + (token_index,), buf = unpack('!B', buf) + amount, buf = bytes_to_output_value(buf) + return NanoHeaderAction( + type=action_type, + token_index=token_index, + amount=amount, + ), buf + + @classmethod + def deserialize(cls, tx: BaseTransaction, buf: bytes) -> tuple[NanoHeader, bytes]: + from hathorlib.nanocontracts import DeprecatedNanoContract + + header_id, buf = buf[:1], buf[1:] + assert header_id == VertexHeaderId.NANO_HEADER.value + + nc_id, buf = unpack_len(32, buf) + nc_seqnum, buf = decode_unsigned(buf, max_bytes=ADDRESS_SEQNUM_SIZE) + (nc_method_len,), buf = unpack('!B', buf) + nc_method, buf = unpack_len(nc_method_len, buf) + (nc_args_bytes_len,), buf = unpack('!H', buf) + nc_args_bytes, buf = unpack_len(nc_args_bytes_len, buf) + + nc_actions: list[NanoHeaderAction] = [] + if not isinstance(tx, DeprecatedNanoContract): + (nc_actions_len,), buf = unpack('!B', buf) + for _ in range(nc_actions_len): + action, buf = cls._deserialize_action(buf) + nc_actions.append(action) + + nc_address, buf = unpack_len(ADDRESS_LEN_BYTES, buf) + nc_script_len, buf = decode_unsigned(buf, max_bytes=_NC_SCRIPT_LEN_MAX_BYTES) + nc_script, buf = unpack_len(nc_script_len, buf) + + decoded_nc_method = nc_method.decode('ascii') + + return cls( + tx=tx, + nc_seqnum=nc_seqnum, + nc_id=nc_id, + nc_method=decoded_nc_method, + nc_args_bytes=nc_args_bytes, + nc_actions=nc_actions, + nc_address=nc_address, + nc_script=nc_script, + ), bytes(buf) + + @staticmethod + def 
_serialize_action(action: NanoHeaderAction) -> bytes: + from hathorlib.base_transaction import output_value_to_bytes + ret = [ + action.type.to_bytes(), + int_to_bytes(action.token_index, 1), + output_value_to_bytes(action.amount), + ] + return b''.join(ret) + + def _serialize_without_header_id(self, *, skip_signature: bool) -> deque[bytes]: + """Serialize the header with the option to skip the signature.""" + from hathorlib.nanocontracts import DeprecatedNanoContract + + encoded_method = self.nc_method.encode('ascii') + + ret: deque[bytes] = deque() + ret.append(self.nc_id) + ret.append(encode_unsigned(self.nc_seqnum, max_bytes=ADDRESS_SEQNUM_SIZE)) + ret.append(int_to_bytes(len(encoded_method), 1)) + ret.append(encoded_method) + ret.append(int_to_bytes(len(self.nc_args_bytes), 2)) + ret.append(self.nc_args_bytes) + + if not isinstance(self.tx, DeprecatedNanoContract): + ret.append(int_to_bytes(len(self.nc_actions), 1)) + for action in self.nc_actions: + ret.append(self._serialize_action(action)) + + ret.append(self.nc_address) + if not skip_signature: + ret.append(encode_unsigned(len(self.nc_script), max_bytes=_NC_SCRIPT_LEN_MAX_BYTES)) + ret.append(self.nc_script) + else: + ret.append(encode_unsigned(0, max_bytes=_NC_SCRIPT_LEN_MAX_BYTES)) + return ret + + def serialize(self) -> bytes: + ret = self._serialize_without_header_id(skip_signature=False) + ret.appendleft(VertexHeaderId.NANO_HEADER.value) + return b''.join(ret) + + def get_sighash_bytes(self) -> bytes: + ret = self._serialize_without_header_id(skip_signature=True) + return b''.join(ret) diff --git a/hathorlib/hathorlib/headers/types.py b/hathorlib/hathorlib/headers/types.py new file mode 100644 index 000000000..7b45b8a8e --- /dev/null +++ b/hathorlib/hathorlib/headers/types.py @@ -0,0 +1,21 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from enum import Enum, unique + + +@unique +class VertexHeaderId(Enum): + NANO_HEADER = b'\x10' + FEE_HEADER = b'\x11' diff --git a/hathorlib/hathorlib/nanocontracts/__init__.py b/hathorlib/hathorlib/nanocontracts/__init__.py new file mode 100644 index 000000000..563962b7b --- /dev/null +++ b/hathorlib/hathorlib/nanocontracts/__init__.py @@ -0,0 +1,21 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from hathorlib.nanocontracts.nanocontract import DeprecatedNanoContract +from hathorlib.nanocontracts.on_chain_blueprint import OnChainBlueprint + +__all__ = [ + 'DeprecatedNanoContract', + 'OnChainBlueprint', +] diff --git a/hathorlib/hathorlib/nanocontracts/nanocontract.py b/hathorlib/hathorlib/nanocontracts/nanocontract.py new file mode 100644 index 000000000..3f6b0c5ec --- /dev/null +++ b/hathorlib/hathorlib/nanocontracts/nanocontract.py @@ -0,0 +1,62 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from hathorlib import Transaction, TxVersion + + +class DeprecatedNanoContract(Transaction): + """NanoContract vertex to be placed on the DAG of transactions.""" + + def __init__(self) -> None: + super().__init__() + + self.version = TxVersion.NANO_CONTRACT + + # nc_id equals to the blueprint_id when a Nano Contract is being created. + # nc_id equals to the nanocontract_id when a method is being called. + self.nc_id: bytes = b'' + + # Name of the method to be called. When creating a new Nano Contract, it must be equal to 'initialize'. + self.nc_method: str = '' + + # Serialized arguments to nc_method. + self.nc_args_bytes: bytes = b'' + + # Pubkey and signature of the transaction owner / caller. 
+ self.nc_pubkey: bytes = b'' + self.nc_signature: bytes = b'' + + ################################ + # Methods for Transaction + ################################ + + def get_funds_fields_from_struct(self, buf: bytes) -> bytes: + from hathorlib.headers import DeprecatedNanoHeader, VertexHeaderId + buf = super().get_funds_fields_from_struct(buf) + nano_header, buf = DeprecatedNanoHeader.deserialize(self, VertexHeaderId.NANO_HEADER.value + buf) + self.headers.append(nano_header) + return buf + + def get_funds_struct(self) -> bytes: + from hathorlib.headers import DeprecatedNanoHeader + struct_bytes = super().get_funds_struct() + nano_header_bytes = self._get_header(DeprecatedNanoHeader).serialize() + struct_bytes += nano_header_bytes[1:] + return struct_bytes + + def get_headers_hash(self) -> bytes: + return b'' + + def get_headers_struct(self) -> bytes: + return b'' diff --git a/hathorlib/hathorlib/nanocontracts/on_chain_blueprint.py b/hathorlib/hathorlib/nanocontracts/on_chain_blueprint.py new file mode 100644 index 000000000..d3aea741b --- /dev/null +++ b/hathorlib/hathorlib/nanocontracts/on_chain_blueprint.py @@ -0,0 +1,139 @@ +# Copyright 2024 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from enum import IntEnum, unique +from typing import NamedTuple + +from hathorlib.conf import HathorSettings +from hathorlib.transaction import Transaction +from hathorlib.utils import int_to_bytes, unpack, unpack_len + +settings = HathorSettings() + +# used to allow new versions of the serialization format in the future +ON_CHAIN_BLUEPRINT_VERSION: int = 1 + + +@unique +class CodeKind(IntEnum): + """ Represents what type of code and format is being used, to allow new code/compression types in the future. + """ + + PYTHON_ZLIB = 1 + + def __bytes__(self) -> bytes: + return int_to_bytes(number=self.value, size=1) + + +class Code(NamedTuple): + """ Store the code object in memory, along with helper methods. + """ + + # determines how the content will be interpreted + kind: CodeKind + + # the encoded content, usually encoded implies compressed + data: bytes + + def __bytes__(self) -> bytes: + # Code serialization format: [kind:variable bytes][null byte][data:variable bytes] + if self.kind is not CodeKind.PYTHON_ZLIB: + raise ValueError('Invalid code kind value') + buf = bytearray() + buf.extend(bytes(self.kind)) + buf.extend(self.data) + return bytes(buf) + + @classmethod + def from_bytes(cls, data: bytes) -> 'Code': + """ Parses a Code instance from a byte sequence, the length of the data is encoded outside of this class. + + NOTE: This will not validate whether the encoded has a valid compression format. A Validator must be used to + check that. + """ + data_arr = bytearray(data) + kind = CodeKind(data_arr[0]) + if kind is not CodeKind.PYTHON_ZLIB: + raise ValueError('Code kind not supported') + compressed_code = data_arr[1:] + return cls(kind, bytes(compressed_code)) + + +class OnChainBlueprint(Transaction): + """On-chain blueprint vertex to be placed on the DAG of transactions.""" + + MIN_NUM_INPUTS = 0 + + def __init__(self) -> None: + super().__init__() + + # Pubkey and signature of the transaction owner / caller. 
+ self.nc_pubkey: bytes = b'' + self.nc_signature: bytes = b'' + + self.code: Code = Code(CodeKind.PYTHON_ZLIB, b'') + + def serialize_code(self) -> bytes: + """Serialization of self.code, to be used for the serialization of this transaction type.""" + buf = bytearray() + buf.extend(int_to_bytes(ON_CHAIN_BLUEPRINT_VERSION, 1)) + serialized_code = bytes(self.code) + buf.extend(int_to_bytes(len(serialized_code), 4)) + buf.extend(serialized_code) + return bytes(buf) + + @classmethod + def deserialize_code(_cls, buf: bytes) -> tuple[Code, bytes]: + """Parses the self.code field, returns the parse result and the remaining bytes.""" + (ocb_version,), buf = unpack('!B', buf) + if ocb_version != ON_CHAIN_BLUEPRINT_VERSION: + raise ValueError(f'unknown on-chain blueprint version: {ocb_version}') + + (serialized_code_len,), buf = unpack('!L', buf) + max_serialized_code_len = settings.NC_ON_CHAIN_BLUEPRINT_CODE_MAX_SIZE_COMPRESSED + if serialized_code_len > max_serialized_code_len: + raise ValueError(f'compressed code data is too large: {serialized_code_len} > {max_serialized_code_len}') + serialized_code, buf = unpack_len(serialized_code_len, buf) + code = Code.from_bytes(serialized_code) + return code, buf + + def _serialize_ocb(self, *, skip_signature: bool = False) -> bytes: + buf = bytearray() + buf += self.serialize_code() + buf += int_to_bytes(len(self.nc_pubkey), 1) + buf += self.nc_pubkey + if not skip_signature: + buf += int_to_bytes(len(self.nc_signature), 1) + buf += self.nc_signature + else: + buf += int_to_bytes(0, 1) + return bytes(buf) + + def get_funds_struct(self) -> bytes: + struct_bytes = super().get_funds_struct() + struct_bytes += self._serialize_ocb() + return struct_bytes + + def get_funds_fields_from_struct(self, buf: bytes) -> bytes: + buf = super().get_funds_fields_from_struct(buf) + + code, buf = OnChainBlueprint.deserialize_code(buf) + self.code = code + + (nc_pubkey_len,), buf = unpack('!B', buf) + self.nc_pubkey, buf = unpack_len(nc_pubkey_len, 
buf) + (nc_signature_len,), buf = unpack('!B', buf) + self.nc_signature, buf = unpack_len(nc_signature_len, buf) + + return buf diff --git a/hathorlib/hathorlib/nanocontracts/types.py b/hathorlib/hathorlib/nanocontracts/types.py new file mode 100644 index 000000000..15341ba35 --- /dev/null +++ b/hathorlib/hathorlib/nanocontracts/types.py @@ -0,0 +1,38 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +from enum import Enum, unique + +from hathorlib.utils import bytes_to_int, int_to_bytes + + +@unique +class NCActionType(Enum): + """Types of interactions a transaction might have with a contract.""" + DEPOSIT = 1 + WITHDRAWAL = 2 + GRANT_AUTHORITY = 3 + ACQUIRE_AUTHORITY = 4 + + def __str__(self) -> str: + return self.name.lower() + + def to_bytes(self) -> bytes: + return int_to_bytes(number=self.value, size=1) + + @staticmethod + def from_bytes(data: bytes) -> NCActionType: + return NCActionType(bytes_to_int(data)) diff --git a/hathorlib/hathorlib/py.typed b/hathorlib/hathorlib/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/hathorlib/hathorlib/scripts.py b/hathorlib/hathorlib/scripts.py new file mode 100644 index 000000000..a1088ed15 --- /dev/null +++ b/hathorlib/hathorlib/scripts.py @@ -0,0 +1,533 @@ +""" +Copyright (c) Hathor Labs and its affiliates. + +This source code is licensed under the MIT license found in the +LICENSE file in the root directory of this source tree. 
+""" + +import re +import struct +from abc import ABC, abstractmethod +from enum import IntEnum +from typing import Any, Dict, List, Match, Optional, Pattern, Type, Union + +from hathorlib.conf import HathorSettings +from hathorlib.exceptions import ScriptError +from hathorlib.utils import ( + decode_address, + get_address_b58_from_public_key_hash, + get_address_b58_from_redeem_script_hash, +) + +settings = HathorSettings() + + +def re_compile(pattern: str) -> Pattern[bytes]: + """ Transform a given script pattern into a regular expression. + + The script pattern is like a regular expression, but you may include five + special symbols: + (i) OP_DUP, OP_HASH160, and all other opcodes; + (ii) DATA_: data with the specified length; + (iii) NUMBER: a 4-byte integer; + (iv) BLOCK: a variable length block, to be parsed later + + Example: + >>> r = re_compile( + ... '^(?:DATA_4 OP_GREATERTHAN_TIMESTAMP)? ' + ... 'OP_DUP OP_HASH160 (DATA_20) OP_EQUALVERIFY OP_CHECKSIG$' + ... ) + + :return: A compiled regular expression matcher + :rtype: :py:class:`re.Pattern` + """ + + def _to_byte_pattern(m: Match[bytes]) -> bytes: + x = m.group().decode('ascii').strip() + if x.startswith('OP_'): + return bytes([Opcode[x]]) + elif x.startswith('DATA_'): + length = int(m.group()[5:]) + return _re_pushdata(length) + elif x.startswith('NUMBER'): + return b'.{5}' + elif x.startswith('BLOCK'): + return b'.*' + else: + raise ValueError('Invalid opcode: {}'.format(x)) + + p = pattern.encode('ascii') + p = re.sub(rb'\s*([A-Z0-9_]+)\s*', _to_byte_pattern, p) + return re.compile(p, re.DOTALL) + + +def _re_pushdata(length: int) -> bytes: + """ Create a regular expression that matches a data block with a given length. + + :return: A non-compiled regular expression + :rtype: bytes + """ + ret = [bytes([Opcode.OP_PUSHDATA1]), bytes([length]), b'.{', str(length).encode('ascii'), b'}'] + + if length <= 75: + # for now, we accept <= 75 bytes with OP_PUSHDATA1. 
    """This class is supposed to be a helper for creating scripts. It abstracts
    some of the corner cases when building a script.

    For example, when pushing data to the stack, we may or may not have to use OP_PUSHDATA.
    This is the sequence we have to add to the script:
        - len(data) <= 75: [len(data) data]
        - len(data) > 75: [OP_PUSHDATA1 len(data) data]

    pushData abstracts these differences and presents a unique interface.
    """
+ """ + + @abstractmethod + def to_human_readable(self) -> Dict[str, Any]: + """Return a nice dict for using on informational json APIs.""" + raise NotImplementedError + + @classmethod + @abstractmethod + def parse_script(cls, script: bytes) -> Optional['BaseScript']: + """Try to parse script into one of the subclasses. Return None if can't parse""" + raise NotImplementedError + + @abstractmethod + def get_type(self) -> str: + """Return readable script type""" + raise NotImplementedError + + @abstractmethod + def get_script(self) -> bytes: + """Return script in bytes""" + raise NotImplementedError + + +class P2PKH(BaseScript): + re_match = re_compile('^(?:(DATA_4) OP_GREATERTHAN_TIMESTAMP)? ' + 'OP_DUP OP_HASH160 (DATA_20) OP_EQUALVERIFY OP_CHECKSIG$') + + def __init__(self, address: str, timelock: Optional[int] = None) -> None: + """This class represents the pay to public hash key script. It enables the person + who has the corresponding private key of the address to spend the tokens. + + This script validates the signature and public key on the corresponding input + data. The public key is first checked against the script address and then the + signature is verified, which means the sender owns the corresponding private key. 
        Output script and the corresponding input data are usually represented like:
        input data: <signature> <public_key>
        output script: OP_DUP OP_HASH160 <public_key_hash> OP_EQUALVERIFY OP_CHECKSIG
        Output script and the corresponding input data are usually represented like:
        output script: OP_HASH160 <redeem_script_hash> OP_EQUAL
        input data: <signature_1> ... <signature_n> <redeem_script>
        :type signatures: List[bytes]
+ + :param script: script to check + :type script: bytes + + :rtype: :py:class:`hathor.transaction.scripts.MultiSig` or None + """ + match = cls.re_match.search(script) + if match: + groups = match.groups() + timelock = None + pushdata_timelock = groups[0] + if pushdata_timelock: + timelock_bytes = pushdata_timelock[1:] + timelock = struct.unpack('!I', timelock_bytes)[0] + redeem_script_hash = get_pushdata(groups[1]) + address_b58 = get_address_b58_from_redeem_script_hash(redeem_script_hash) + return cls(address_b58, timelock) + return None + + +class DataScript(BaseScript): + def __init__(self, data: str) -> None: + """This class represents a data script usually used by NFT transactions. + The script has a data field and ends with an OP_CHECKSIG so it can't be spent. + + The script format is: + + :param data: data string to be stored in the script + :type data: string + """ + self.data = data + + def to_human_readable(self) -> Dict[str, Any]: + """ Decode DataScript class with type and data + + :return: Dict with ScriptData info + :rtype: Dict[str:] + """ + ret: Dict[str, Any] = {} + ret['type'] = self.get_type() + ret['data'] = self.data + return ret + + def get_type(self) -> str: + return 'Data' + + def get_script(self) -> bytes: + return DataScript.create_output_script(self.data) + + @classmethod + def create_output_script(cls, data: str) -> bytes: + """ + :param data: Data to be stored in the script + :type data: string + + :rtype: bytes + """ + s = HathorScript() + s.pushData(data.encode('utf-8')) + s.addOpcode(Opcode.OP_CHECKSIG) + return s.data + + @classmethod + def parse_script(cls, script: bytes) -> Optional['DataScript']: + """Checks if the given script is of type data script. If it is, returns the DataScript object. + Otherwise, returns None. 
+ + :param script: script to check + :type script: bytes + + :rtype: :py:class:`hathor.transaction.scripts.DataScript` or None + """ + if len(script) < 2: + # At least 1 byte for len data and 1 byte for OP_CHECKSIG + return None + + # The expected len will be at least 2 bytes + # 1 for the script len and 1 for the OP_CHECKSIG in the end + expected_script_len = 2 + + if script[0] == Opcode.OP_PUSHDATA1: + expected_script_len += 1 + data_bytes_len = script[1] + else: + data_bytes_len = script[0] + + expected_script_len += data_bytes_len + + if expected_script_len != len(script): + # Script is not a DataScript + return None + + if script[-1] != Opcode.OP_CHECKSIG: + # Last script byte must be an OP_CHECKSIG + return None + + # Get the data from the script + data = get_pushdata(script) + + try: + decoded_str = data.decode('utf-8') + return cls(decoded_str) + except UnicodeDecodeError: + return None + + +def create_output_script(address: bytes, timelock: Optional[Any] = None) -> bytes: + """ Verifies if address is P2PKH or Multisig and create correct output script + + :param address: address to send tokens + :type address: bytes + + :param timelock: timestamp until when the output is locked + :type timelock: bytes + + :raises ScriptError: if address is not from one of the possible options + + :rtype: bytes + """ + if address[0] == binary_to_int(settings.P2PKH_VERSION_BYTE): + return P2PKH.create_output_script(address, timelock) + elif address[0] == binary_to_int(settings.MULTISIG_VERSION_BYTE): + return MultiSig.create_output_script(address, timelock) + else: + raise ScriptError('The address is not valid') + + +def parse_address_script(script: bytes) -> Optional[BaseScript]: + """ Verifies if script is P2PKH, Multisig or DataScript and calls correct parse_script method + + :param script: script to decode + :type script: bytes + + :return: P2PKH, MultiSig or DataScript class or None + :rtype: class or None + """ + script_classes: List[Type[Union[BaseScript]]] = [P2PKH, 
MultiSig, DataScript] + # Each class verifies its script + for script_class in script_classes: + script_obj = script_class.parse_script(script) + if script_obj is not None: + return script_obj + return None + + +def get_pushdata(data: bytes) -> bytes: + if data[0] > 75: + length = data[1] + start = 2 + else: + length = data[0] + start = 1 + return data[start:(start + length)] + + +def binary_to_int(binary: bytes) -> int: + """Receives a binary and transforms it to an integer + + :param binary: value to convert + :type binary: bytes + """ + if len(binary) == 1: + _format = '!B' + elif len(binary) == 2: + _format = '!H' + elif len(binary) == 4: + _format = '!I' + elif len(binary) == 8: + _format = '!Q' + else: + raise struct.error + + value: int + (value,) = struct.unpack(_format, binary) + return value diff --git a/hathorlib/hathorlib/serialization/__init__.py b/hathorlib/hathorlib/serialization/__init__.py new file mode 100644 index 000000000..65e1626a0 --- /dev/null +++ b/hathorlib/hathorlib/serialization/__init__.py @@ -0,0 +1,27 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from .deserializer import Deserializer +from .exceptions import BadDataError, OutOfDataError, SerializationError, TooLongError, UnsupportedTypeError +from .serializer import Serializer + +__all__ = [ + 'Serializer', + 'Deserializer', + 'SerializationError', + 'UnsupportedTypeError', + 'TooLongError', + 'OutOfDataError', + 'BadDataError', +] diff --git a/hathorlib/hathorlib/serialization/adapters/__init__.py b/hathorlib/hathorlib/serialization/adapters/__init__.py new file mode 100644 index 000000000..88a7d948a --- /dev/null +++ b/hathorlib/hathorlib/serialization/adapters/__init__.py @@ -0,0 +1,10 @@ +from .generic_adapter import GenericDeserializerAdapter, GenericSerializerAdapter +from .max_bytes import MaxBytesDeserializer, MaxBytesExceededError, MaxBytesSerializer + +__all__ = [ + 'GenericDeserializerAdapter', + 'GenericSerializerAdapter', + 'MaxBytesDeserializer', + 'MaxBytesExceededError', + 'MaxBytesSerializer', +] diff --git a/hathorlib/hathorlib/serialization/adapters/generic_adapter.py b/hathorlib/hathorlib/serialization/adapters/generic_adapter.py new file mode 100644 index 000000000..c7f7d5862 --- /dev/null +++ b/hathorlib/hathorlib/serialization/adapters/generic_adapter.py @@ -0,0 +1,110 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from types import TracebackType +from typing import Generic, TypeVar, Union + +from typing_extensions import Self, override + +from hathorlib.serialization.deserializer import Deserializer +from hathorlib.serialization.serializer import Serializer + +from ..types import Buffer + +S = TypeVar('S', bound=Serializer) +D = TypeVar('D', bound=Deserializer) + + +class GenericSerializerAdapter(Serializer, Generic[S]): + inner: S + + def __init__(self, serializer: S) -> None: + self.inner = serializer + + @override + def finalize(self) -> Buffer: + return self.inner.finalize() + + @override + def cur_pos(self) -> int: + return self.inner.cur_pos() + + @override + def write_byte(self, data: int) -> None: + self.inner.write_byte(data) + + @override + def write_bytes(self, data: Buffer) -> None: + self.inner.write_bytes(data) + + # allow using this adapter as a context manager: + + def __enter__(self) -> Self: + return self + + def __exit__( + self, + exc_type: Union[type[BaseException], None], + exc_value: Union[BaseException, None], + traceback: Union[TracebackType, None], + ) -> None: + pass + + +class GenericDeserializerAdapter(Deserializer, Generic[D]): + inner: D + + def __init__(self, deserializer: D) -> None: + self.inner = deserializer + + @override + def finalize(self) -> None: + return self.inner.finalize() + + @override + def is_empty(self) -> bool: + return self.inner.is_empty() + + @override + def peek_byte(self) -> int: + return self.inner.peek_byte() + + @override + def peek_bytes(self, n: int, *, exact: bool = True) -> Buffer: + return self.inner.peek_bytes(n, exact=exact) + + @override + def read_byte(self) -> int: + return self.inner.read_byte() + + @override + def read_bytes(self, n: int, *, exact: bool = True) -> Buffer: + return self.inner.read_bytes(n, exact=exact) + + @override + def read_all(self) -> Buffer: + return self.inner.read_all() + + # allow using this adapter as a context manager: + + def __enter__(self) -> Self: + return self + + def 
    expected to either: bubble up the exception (or an equivalent exception), or return an error. Handlers should not
+ + It is possible that the inner serializer is still usable, but the point where the serialized stopped writing or + reading might leave the rest of the data unusable, so for that reason it should be considered a failed + (de)serialization overall, and not simply a failed "read/write" operation. + """ + pass + + +class MaxBytesSerializer(GenericSerializerAdapter[S]): + def __init__(self, serializer: S, max_bytes: int) -> None: + super().__init__(serializer) + self._bytes_left = max_bytes + + def _check_update_exceeds(self, write_size: int) -> None: + self._bytes_left -= write_size + if self._bytes_left < 0: + raise MaxBytesExceededError + + @override + def write_byte(self, data: int) -> None: + self._check_update_exceeds(1) + super().write_byte(data) + + @override + def write_bytes(self, data: Buffer) -> None: + data_view = memoryview(data) + self._check_update_exceeds(len(data_view)) + super().write_bytes(data_view) + + +class MaxBytesDeserializer(GenericDeserializerAdapter[D]): + def __init__(self, deserializer: D, max_bytes: int) -> None: + super().__init__(deserializer) + self._bytes_left = max_bytes + + def _check_update_exceeds(self, read_size: int) -> None: + self._bytes_left -= read_size + if self._bytes_left < 0: + raise MaxBytesExceededError + + @override + def read_byte(self) -> int: + self._check_update_exceeds(1) + return super().read_byte() + + @override + def read_bytes(self, n: int, *, exact: bool = True) -> Buffer: + self._check_update_exceeds(n) + return super().read_bytes(n, exact=exact) + + @override + def read_all(self) -> Buffer: + result = super().read_bytes(self._bytes_left, exact=False) + if not self.is_empty(): + raise MaxBytesExceededError + return result diff --git a/hathorlib/hathorlib/serialization/bytes_deserializer.py b/hathorlib/hathorlib/serialization/bytes_deserializer.py new file mode 100644 index 000000000..1a26ec7b2 --- /dev/null +++ b/hathorlib/hathorlib/serialization/bytes_deserializer.py @@ -0,0 +1,76 @@ +# Copyright 2025 
Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing_extensions import override + +from .deserializer import Deserializer +from .exceptions import OutOfDataError +from .types import Buffer + +_EMPTY_VIEW = memoryview(b'') + + +class BytesDeserializer(Deserializer): + """Simple implementation of a Deserializer to parse values from a byte sequence. + + This implementation maintains a memoryview that is shortened as the bytes are read. + """ + + def __init__(self, data: Buffer) -> None: + self._view = memoryview(data) + + @override + def finalize(self) -> None: + if not self.is_empty(): + raise ValueError('trailing data') + del self._view + + @override + def is_empty(self) -> bool: + # XXX: least amount of OPs, "not" converts to bool with the correct semantics of "is empty" + return not self._view + + @override + def peek_byte(self) -> int: + if not len(self._view): + raise OutOfDataError('not enough bytes to read') + return self._view[0] + + @override + def peek_bytes(self, n: int, *, exact: bool = True) -> memoryview: + if n < 0: + raise ValueError('value cannot be negative') + if exact and len(self._view) < n: + raise OutOfDataError('not enough bytes to read') + return self._view[:n] + + @override + def read_byte(self) -> int: + b = self.peek_byte() + self._view = self._view[1:] + return b + + @override + def read_bytes(self, n: int, *, exact: bool = True) -> memoryview: + b = self.peek_bytes(n, exact=exact) + if exact and len(self._view) < n: + 
raise OutOfDataError('not enough bytes to read') + self._view = self._view[n:] + return b + + @override + def read_all(self) -> memoryview: + b = self._view + self._view = _EMPTY_VIEW + return b diff --git a/hathorlib/hathorlib/serialization/bytes_serializer.py b/hathorlib/hathorlib/serialization/bytes_serializer.py new file mode 100644 index 000000000..604b39802 --- /dev/null +++ b/hathorlib/hathorlib/serialization/bytes_serializer.py @@ -0,0 +1,53 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing_extensions import override + +from .serializer import Serializer +from .types import Buffer + + +class BytesSerializer(Serializer): + """Simple implementation of Serializer to write to memory. + + This implementation defers joining everything until finalize is called, before that every write is stored as a + memoryview in a list. 
+ """ + + def __init__(self) -> None: + self._parts: list[memoryview] = [] + self._pos: int = 0 + + @override + def finalize(self) -> memoryview: + result = memoryview(b''.join(self._parts)) + del self._parts + del self._pos + return result + + @override + def cur_pos(self) -> int: + return self._pos + + @override + def write_byte(self, data: int) -> None: + # int.to_bytes checks for correct range + self._parts.append(memoryview(int.to_bytes(data, length=1, byteorder='big'))) + self._pos += 1 + + @override + def write_bytes(self, data: Buffer) -> None: + part = memoryview(data) + self._parts.append(part) + self._pos += len(part) diff --git a/hathorlib/hathorlib/serialization/deserializer.py b/hathorlib/hathorlib/serialization/deserializer.py new file mode 100644 index 000000000..5aa48046a --- /dev/null +++ b/hathorlib/hathorlib/serialization/deserializer.py @@ -0,0 +1,109 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
        """Read n bytes but don't consume them from the buffer."""
recommended that implementors of Deserializer specialize this implementation + def iter_bytes() -> Iterator[int]: + while not self.is_empty(): + yield self.read_byte() + return bytes(iter_bytes()) + + def read_struct(self, format: str) -> tuple[Any, ...]: + size = struct.calcsize(format) + data = self.read_bytes(size) + return struct.unpack_from(format, data) + + def with_max_bytes(self, max_bytes: int) -> MaxBytesDeserializer[Self]: + """Helper method to wrap the current deserializer with MaxBytesDeserializer.""" + from .adapters import MaxBytesDeserializer + return MaxBytesDeserializer(self, max_bytes) + + @overload + def with_optional_max_bytes(self, max_bytes: None) -> Self: + ... + + @overload + def with_optional_max_bytes(self, max_bytes: int) -> MaxBytesDeserializer[Self]: + ... + + def with_optional_max_bytes(self, max_bytes: int | None) -> Self | MaxBytesDeserializer[Self]: + """Helper method to optionally wrap the current deserializer.""" + if max_bytes is None: + return self + return self.with_max_bytes(max_bytes) diff --git a/hathorlib/hathorlib/serialization/encoding/__init__.py b/hathorlib/hathorlib/serialization/encoding/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/hathorlib/hathorlib/serialization/encoding/leb128.py b/hathorlib/hathorlib/serialization/encoding/leb128.py new file mode 100644 index 000000000..ef154f01a --- /dev/null +++ b/hathorlib/hathorlib/serialization/encoding/leb128.py @@ -0,0 +1,93 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +""" +This module implements LEB128 for signed integers. + +LEB128 or Little Endian Base 128 is a variable-length code compression used to store arbitrarily large +integers in a small number of bytes. LEB128 is used in the DWARF debug file format and the WebAssembly +binary encoding for all integer literals. + +References: +- https://en.wikipedia.org/wiki/LEB128 +- https://dwarfstd.org/doc/DWARF5.pdf +- https://webassembly.github.io/spec/core/binary/values.html#integers + +This module implements LEB128 encoding/decoding using the standard 1-byte block split into 1-bit for continuation and +7-bits for data. The data can be either a signed or unsigned integer. + +>>> se = Serializer.build_bytes_serializer() +>>> se.write_bytes(b'test') # writes 74657374 +>>> encode_leb128(se, 0, signed=True) # writes 00 +>>> encode_leb128(se, 624485, signed=True) # writes e58e26 +>>> encode_leb128(se, -123456, signed=True) # writes c0bb78 +>>> bytes(se.finalize()).hex() +'7465737400e58e26c0bb78' + +>>> data = bytes.fromhex('00 e58e26 c0bb78 74657374') +>>> de = Deserializer.build_bytes_deserializer(data) +>>> decode_leb128(de, signed=True) # reads 00 +0 +>>> decode_leb128(de, signed=True) # reads e58e26 +624485 +>>> decode_leb128(de, signed=True) # reads c0bb78 +-123456 +>>> bytes(de.read_all()) # reads 74657374 +b'test' +>>> de.finalize() +""" + +from hathorlib.serialization import Deserializer, Serializer + + +def encode_leb128(serializer: Serializer, value: int, *, signed: bool) -> None: + """ Encodes an integer using LEB128. + + Caller must explicitly choose `signed=True` or `signed=False`. + + This module's docstring has more details on LEB128 and examples. 
+ """ + if not signed and value < 0: + raise ValueError('cannot encode value <0 as unsigend') + while True: + byte = value & 0b0111_1111 + value >>= 7 + if signed: + cont = (value == 0 and (byte & 0b0100_0000) == 0) or (value == -1 and (byte & 0b0100_0000) != 0) + else: + cont = (value == 0 and (byte & 0b1000_0000) == 0) + if cont: + serializer.write_byte(byte) + break + serializer.write_byte(byte | 0b1000_0000) + + +def decode_leb128(deserializer: Deserializer, *, signed: bool) -> int: + """ Decodes a LEB128-encoded integer. + + Caller must explicitly choose `signed=True` or `signed=False`. + + This module's docstring has more details on LEB128 and examples. + """ + result = 0 + shift = 0 + while True: + byte = deserializer.read_byte() + result |= (byte & 0b0111_1111) << shift + shift += 7 + assert shift % 7 == 0 + if (byte & 0b1000_0000) == 0: + if signed and (byte & 0b0100_0000) != 0: + return result | -(1 << shift) + return result diff --git a/hathorlib/hathorlib/serialization/exceptions.py b/hathorlib/hathorlib/serialization/exceptions.py new file mode 100644 index 000000000..b35f3f8e2 --- /dev/null +++ b/hathorlib/hathorlib/serialization/exceptions.py @@ -0,0 +1,37 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 

import struct

from hathorlib.exceptions import HathorError


class SerializationError(HathorError):
    """Base class for every error raised by the serialization package."""


class UnsupportedTypeError(SerializationError):
    """Raised when a value of an unsupported type is (de)serialized."""


class TooLongError(SerializationError):
    """Raised when a configured length limit is exceeded."""


class OutOfDataError(SerializationError, struct.error):
    """Raised when the input ends before a value is fully read.

    Also subclasses `struct.error` so callers catching struct errors keep working.
    """


class BadDataError(SerializationError):
    """Raised when the bytes being deserialized are malformed."""
# Copyright 2025 Hathor Labs
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import annotations

import struct
from abc import ABC, abstractmethod
from typing import TYPE_CHECKING, Any, TypeVar, overload

from typing_extensions import Self

from .types import Buffer

if TYPE_CHECKING:
    from .adapters import MaxBytesSerializer
    from .bytes_serializer import BytesSerializer

T = TypeVar('T')


class Serializer(ABC):
    """Abstract base class for byte-oriented serializers.

    Subclasses must implement `cur_pos`, `write_byte` and `write_bytes`;
    the remaining methods are conveniences built on those primitives.
    """

    def finalize(self) -> Buffer:
        """Get the resulting byte sequence, the serializer cannot be reused after this."""
        # Deliberately not abstract: only accumulating serializers support finalization.
        raise TypeError('this serializer does not support finalization')

    @abstractmethod
    def cur_pos(self) -> int:
        # Current write position, i.e. how many bytes were written so far.
        raise NotImplementedError

    @abstractmethod
    def write_byte(self, data: int) -> None:
        """Write a single byte."""
        raise NotImplementedError

    @abstractmethod
    def write_bytes(self, data: Buffer) -> None:
        # XXX: it is recommended that implementors of Serializer specialize this implementation
        for byte in bytes(memoryview(data)):
            self.write_byte(byte)

    def write_struct(self, data: tuple[Any, ...], format: str) -> None:
        # Pack `data` with `struct.pack` using `format` and write the resulting bytes.
        data_bytes = struct.pack(format, *data)
        self.write_bytes(data_bytes)

    def with_max_bytes(self, max_bytes: int) -> MaxBytesSerializer[Self]:
        """Helper method to wrap the current serializer with MaxBytesSerializer."""
        from .adapters import MaxBytesSerializer
        return MaxBytesSerializer(self, max_bytes)

    @overload
    def with_optional_max_bytes(self, max_bytes: None) -> Self:
        ...

    @overload
    def with_optional_max_bytes(self, max_bytes: int) -> MaxBytesSerializer[Self]:
        ...

    def with_optional_max_bytes(self, max_bytes: int | None) -> Self | MaxBytesSerializer[Self]:
        """Helper method to optionally wrap the current serializer."""
        # None means "no limit": the serializer is returned unwrapped.
        if max_bytes is None:
            return self
        return self.with_max_bytes(max_bytes)

    @staticmethod
    def build_bytes_serializer() -> BytesSerializer:
        # Convenience constructor for the common in-memory serializer.
        from .bytes_serializer import BytesSerializer
        return BytesSerializer()
# Copyright 2025 Hathor Labs
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from typing import Union

from typing_extensions import TypeAlias

# Byte buffer accepted/produced by (de)serializers: immutable bytes or a zero-copy memoryview.
Buffer: TypeAlias = Union[bytes, memoryview]
"""
Copyright 2019 Hathor Labs

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from enum import IntEnum
from struct import error as StructError, pack
from typing import Tuple

from hathorlib.base_transaction import TxInput, TxOutput
from hathorlib.conf import HathorSettings
from hathorlib.exceptions import TransactionDataError
from hathorlib.scripts import DataScript
from hathorlib.transaction import Transaction
from hathorlib.utils import clean_token_string, int_to_bytes, unpack, unpack_len

settings = HathorSettings()

# Signal bits (B), version (B), inputs len (B), outputs len (B)
_FUNDS_FORMAT_STRING = '!BBBB'

# Signal bits (B), version (B), inputs len (B), outputs len (B)
_SIGHASH_ALL_FORMAT_STRING = '!BBBB'


# used when (de)serializing token information
# version 1 is the default behavior
class TokenVersion(IntEnum):
    """Version byte stored in the serialized token info."""
    NATIVE = 0
    DEPOSIT = 1
    FEE = 2


class TokenCreationTransaction(Transaction):
    """Transaction that creates a new custom token; its own hash becomes the token uid."""

    def __init__(self) -> None:
        super().__init__()
        # for this special tx, its own hash is used as the created token uid. We're artificially
        # creating the tokens list here
        self.tokens: list[bytes] = []
        # default version used when (de)serializing token information
        self.token_version: TokenVersion = TokenVersion.DEPOSIT

    def __str__(self) -> str:
        return (
            f'TokenCreationTransaction(nonce={self.nonce}, '
            f'timestamp={self.timestamp}, '
            f'version={int(self.version)}, '
            f'weight={self.weight:.6f}, '
            f'hash={self.hash_hex}, '
            f'token_name={self.token_name}, '
            f'token_symbol={self.token_symbol}, '
            f'token_version={self.token_version})'
        )

    def update_hash(self) -> None:
        """ When we update the hash, we also have to update the tokens uid list
        """
        super().update_hash()
        assert self.hash is not None
        self.tokens = [self.hash]

    def get_funds_fields_from_struct(self, buf: bytes) -> bytes:
        """ Gets all funds fields for a transaction from a buffer.

        :param buf: Bytes of a serialized transaction
        :type buf: bytes

        :return: A buffer containing the remaining struct bytes
        :rtype: bytes

        :raises ValueError: when the sequence of bytes is incorrect
        """
        (self.signal_bits, self.version, inputs_len, outputs_len), buf = unpack(_FUNDS_FORMAT_STRING, buf)

        for _ in range(inputs_len):
            txin, buf = TxInput.create_from_bytes(buf)
            self.inputs.append(txin)

        for _ in range(outputs_len):
            txout, buf = TxOutput.create_from_bytes(buf)
            self.outputs.append(txout)

        # token name and symbol
        (
            self.token_name,
            self.token_symbol,
            self.token_version,
            buf
        ) = TokenCreationTransaction.deserialize_token_info(buf)

        return buf

    def get_funds_struct(self) -> bytes:
        """ Returns the funds data serialization of the transaction

        :return: funds data serialization of the transaction
        :rtype: bytes
        """
        struct_bytes = pack(
            _FUNDS_FORMAT_STRING,
            self.signal_bits,
            self.version,
            len(self.inputs),
            len(self.outputs)
        )

        tx_inputs = []
        for tx_input in self.inputs:
            tx_inputs.append(bytes(tx_input))
        struct_bytes += b''.join(tx_inputs)

        tx_outputs = []
        for tx_output in self.outputs:
            tx_outputs.append(bytes(tx_output))
        struct_bytes += b''.join(tx_outputs)

        struct_bytes += self.serialize_token_info()

        return struct_bytes

    def get_sighash_all(self, clear_input_data: bool = True) -> bytes:
        """ Returns a serialization of the inputs and outputs without including any other field

        :return: Serialization of the inputs, outputs and tokens
        :rtype: bytes
        """
        struct_bytes = pack(
            _SIGHASH_ALL_FORMAT_STRING,
            self.signal_bits,
            self.version,
            len(self.inputs),
            len(self.outputs)
        )

        tx_inputs = []
        for tx_input in self.inputs:
            tx_inputs.append(tx_input.get_sighash_bytes(clear_input_data))
        struct_bytes += b''.join(tx_inputs)

        tx_outputs = []
        for tx_output in self.outputs:
            tx_outputs.append(bytes(tx_output))
        struct_bytes += b''.join(tx_outputs)

        struct_bytes += self.serialize_token_info()

        # headers are also covered by the signature
        for header in self.headers:
            struct_bytes += header.get_sighash_bytes()

        return struct_bytes

    def serialize_token_info(self) -> bytes:
        """ Returns the serialization for token name and symbol
        """
        encoded_name = self.token_name.encode('utf-8')
        encoded_symbol = self.token_symbol.encode('utf-8')

        ret = b''
        ret += int_to_bytes(self.token_version, 1)
        ret += int_to_bytes(len(encoded_name), 1)
        ret += encoded_name
        ret += int_to_bytes(len(encoded_symbol), 1)
        ret += encoded_symbol

        return ret

    @classmethod
    def deserialize_token_info(cls, buf: bytes) -> Tuple[str, str, TokenVersion, bytes]:
        """ Gets the token name, symbol and version from serialized format
        """
        (raw_token_version,), buf = unpack('!B', buf)
        try:
            token_version = TokenVersion(raw_token_version)
        except ValueError:
            raise ValueError('unknown token version: {}'.format(raw_token_version))

        (name_len,), buf = unpack('!B', buf)
        name, buf = unpack_len(name_len, buf)

        (symbol_len,), buf = unpack('!B', buf)
        symbol, buf = unpack_len(symbol_len, buf)

        # Token name and symbol can be only utf-8 valid strings for now
        decoded_name = decode_string_utf8(name, 'Token name')
        decoded_symbol = decode_string_utf8(symbol, 'Token symbol')

        return decoded_name, decoded_symbol, token_version, buf

    def verify_token_info(self) -> None:
        """ Validates token info
        """
        name_len = len(self.token_name)
        symbol_len = len(self.token_symbol)
        if name_len == 0 or name_len > settings.MAX_LENGTH_TOKEN_NAME:
            raise TransactionDataError('Invalid token name length ({})'.format(name_len))
        if symbol_len == 0 or symbol_len > settings.MAX_LENGTH_TOKEN_SYMBOL:
            raise TransactionDataError('Invalid token symbol length ({})'.format(symbol_len))

        # Can't create token with hathor name or symbol
        if clean_token_string(self.token_name) == clean_token_string(settings.HATHOR_TOKEN_NAME):
            raise TransactionDataError('Invalid token name ({})'.format(self.token_name))
        if clean_token_string(self.token_symbol) == clean_token_string(settings.HATHOR_TOKEN_SYMBOL):
            raise TransactionDataError('Invalid token symbol ({})'.format(self.token_symbol))

        # Can't create the token with NATIVE version
        if self.token_version == TokenVersion.NATIVE:
            raise TransactionDataError('Invalid token version ({})'.format(self.token_version))

    def is_nft_creation_standard(self) -> bool:
        """Returns True if it's a standard NFT creation transaction"""
        # We will check the outputs to validate that we have an NFT standard creation
        # https://github.com/HathorNetwork/rfcs/blob/master/text/0032-nft-standard.md#transaction-standard
        if len(self.outputs) < 2:
            # NFT creation must have at least a DataScript output (the first one) and a Token P2PKH output
            return False

        first_output = self.outputs[0]
        parsed_first_output = DataScript.parse_script(first_output.script)

        if parsed_first_output is None:
            # First output is not a DataScript output
            return False

        if first_output.value != 1 or first_output.token_data != 0:
            # NFT creation DataScript output must have value 1 and must be of HTR
            return False

        if not first_output.is_standard_script(only_standard_script_type=False):
            # Here we check that the script size is standard
            return False

        for output in self.outputs[1:]:
            if not output.is_standard_script():
                # Invalid output script for an NFT creation tx
                return False

            if output.get_token_index() not in [0, 1]:
                # All output (except the first) must be of HTR or the created token
                return False

        return True


def decode_string_utf8(encoded: bytes, key: str) -> str:
    """ Raises StructError in case it's not a valid utf-8 string
    """
    try:
        decoded = encoded.decode('utf-8')
        return decoded
    except UnicodeDecodeError:
        raise StructError('{} must be a valid utf-8 string.'.format(key))
"""
Copyright (c) Hathor Labs and its affiliates.

This source code is licensed under the MIT license found in the
LICENSE file in the root directory of this source tree.
"""

from __future__ import annotations

import struct
from collections import namedtuple
from struct import pack
from typing import TYPE_CHECKING, List, TypeVar

from hathorlib.base_transaction import TX_HASH_SIZE, BaseTransaction, TxInput, TxOutput
from hathorlib.conf import HathorSettings
from hathorlib.exceptions import InvalidOutputValue, InvalidToken
from hathorlib.headers import VertexBaseHeader
from hathorlib.utils import unpack, unpack_len

if TYPE_CHECKING:
    from hathorlib.headers import FeeHeader, NanoHeader

T = TypeVar('T', bound=VertexBaseHeader)

settings = HathorSettings()

# Signal bits (B), version (B), token uids len (B) and inputs len (B), outputs len (B).
_FUNDS_FORMAT_STRING = '!BBBBB'

# Signal bits (B), version (B), inputs len (B), and outputs len (B), token uids len (B).
+_SIGHASH_ALL_FORMAT_STRING = '!BBBBB' + +TokenInfo = namedtuple('TokenInfo', 'amount can_mint can_melt') + + +class Transaction(BaseTransaction): + __slots__ = ('tokens',) + + SERIALIZATION_NONCE_SIZE = 4 + + def __init__(self) -> None: + """ + Creating new init just to make sure inputs will always be empty array + Inputs: all inputs that are being used (empty in case of a block) + """ + super().__init__() + self.tokens: List[bytes] = [] + + @property + def is_block(self) -> bool: + """Returns true if this is a block""" + return False + + @property + def is_transaction(self) -> bool: + """Returns true if this is a transaction""" + return True + + def is_nano_contract(self) -> bool: + try: + self.get_nano_header() + except ValueError: + return False + else: + return True + + def has_fees(self) -> bool: + """Returns true if this transaction has a fee header""" + try: + self.get_fee_header() + except ValueError: + return False + else: + return True + + def get_nano_header(self) -> NanoHeader: + from hathorlib.headers import NanoHeader + """Return the NanoHeader or raise ValueError.""" + return self._get_header(NanoHeader) + + def get_fee_header(self) -> FeeHeader: + from hathorlib.headers import FeeHeader + """Return the FeeHeader or raise ValueError.""" + return self._get_header(FeeHeader) + + def _get_header(self, header_type: type[T]) -> T: + """Return the header of the given type or raise ValueError.""" + for header in self.headers: + if isinstance(header, header_type): + return header + raise ValueError(f'{header_type.__name__.lower()} not found') + + @classmethod + def create_from_struct(cls, struct_bytes: bytes) -> 'Transaction': + try: + tx = cls() + buf = tx.get_fields_from_struct(struct_bytes) + + if len(buf) < cls.SERIALIZATION_NONCE_SIZE: + raise ValueError('Invalid sequence of bytes') + + [tx.nonce, ], buf = unpack('!I', buf) + + while buf: + buf = tx.get_header_from_bytes(buf) + except struct.error: + raise ValueError('Invalid sequence of bytes') + + 
tx.update_hash() + return tx + + def calculate_height(self) -> int: + # XXX: transactions don't have height, using 0 as a placeholder + return 0 + + def get_funds_fields_from_struct(self, buf: bytes) -> bytes: + """ Gets all funds fields for a transaction from a buffer. + + :param buf: Bytes of a serialized transaction + :type buf: bytes + + :return: A buffer containing the remaining struct bytes + :rtype: bytes + + :raises ValueError: when the sequence of bytes is incorect + """ + (self.signal_bits, self.version, tokens_len, inputs_len, outputs_len), buf = unpack( + _FUNDS_FORMAT_STRING, + buf + ) + + for _ in range(tokens_len): + token_uid, buf = unpack_len(TX_HASH_SIZE, buf) + self.tokens.append(token_uid) + + for _ in range(inputs_len): + txin, buf = TxInput.create_from_bytes(buf) + self.inputs.append(txin) + + for _ in range(outputs_len): + txout, buf = TxOutput.create_from_bytes(buf) + self.outputs.append(txout) + + return buf + + def get_funds_struct(self) -> bytes: + """Return the funds data serialization of the transaction + + :return: funds data serialization of the transaction + :rtype: bytes + """ + struct_bytes = pack( + _FUNDS_FORMAT_STRING, + self.signal_bits, + self.version, + len(self.tokens), + len(self.inputs), + len(self.outputs) + ) + + for token_uid in self.tokens: + struct_bytes += token_uid + + for tx_input in self.inputs: + struct_bytes += bytes(tx_input) + + for tx_output in self.outputs: + struct_bytes += bytes(tx_output) + + return struct_bytes + + def get_sighash_all(self, clear_input_data: bool = True) -> bytes: + """Return a serialization of the inputs, outputs and tokens without including any other field + + :return: Serialization of the inputs, outputs and tokens + :rtype: bytes + """ + struct_bytes = bytearray( + pack( + _SIGHASH_ALL_FORMAT_STRING, + self.signal_bits, + self.version, + len(self.tokens), + len(self.inputs), + len(self.outputs) + ) + ) + + for token_uid in self.tokens: + struct_bytes += token_uid + + for tx_input in 
self.inputs: + struct_bytes += tx_input.get_sighash_bytes(clear_input_data) + + for tx_output in self.outputs: + struct_bytes += bytes(tx_output) + + for header in self.headers: + struct_bytes += header.get_sighash_bytes() + + ret = bytes(struct_bytes) + return ret + + def get_token_uid(self, index: int) -> bytes: + """Returns the token uid with corresponding index from the tx token uid list. + + Hathor always has index 0, but we don't include it in the token uid list, so other tokens are + always 1-off. This means that token with index 1 is the first in the list. + + :param index: token index on the token uid list + :type index: int + + :return: the token uid + :rtype: bytes + """ + if index == 0: + return settings.HATHOR_TOKEN_UID + return self.tokens[index - 1] + + def verify_without_storage(self) -> None: + """ Run all verifications that do not need a storage. + """ + self.verify_pow() + self.verify_outputs() + + def verify_outputs(self) -> None: + """Verify outputs reference an existing token uid in the tx list and there are no hathor + authority UTXOs + + :raises InvalidToken: output references non existent token uid or when there's a hathor authority utxo + """ + for index, output in enumerate(self.outputs): + # check index is valid + if output.get_token_index() > len(self.tokens): + raise InvalidToken('token uid index not available: index {}'.format(output.get_token_index())) + + # no hathor authority UTXO + if (output.get_token_index() == 0) and output.is_token_authority(): + raise InvalidToken('Cannot have authority UTXO for hathor tokens: {}'.format( + output.to_human_readable())) + + # output value must be positive + if output.value <= 0: + raise InvalidOutputValue('Output value must be a positive integer. 
Value: {} and index: {}'.format( + output.value, index)) diff --git a/hathorlib/hathorlib/utils.py b/hathorlib/hathorlib/utils.py new file mode 100644 index 000000000..c41e75e85 --- /dev/null +++ b/hathorlib/hathorlib/utils.py @@ -0,0 +1,310 @@ +""" +Copyright (c) Hathor Labs and its affiliates. + +This source code is licensed under the MIT license found in the +LICENSE file in the root directory of this source tree. +""" +import hashlib +import re +import struct +from typing import Any, Tuple, Union, cast + +import base58 +from cryptography.hazmat.primitives.asymmetric import ec +from cryptography.hazmat.primitives.serialization import Encoding, PublicFormat + +from hathorlib.conf import HathorSettings +from hathorlib.exceptions import InvalidAddress +from hathorlib.serialization import Deserializer, SerializationError, Serializer +from hathorlib.serialization.adapters import MaxBytesExceededError +from hathorlib.serialization.encoding.leb128 import decode_leb128, encode_leb128 + +settings = HathorSettings() + + +def int_to_bytes(number: int, size: int, signed: bool = False) -> bytes: + return number.to_bytes(size, byteorder='big', signed=signed) + + +def bytes_to_int(data: bytes, *, signed: bool = False) -> int: + """ + Converts data in bytes to an int. Assumes big-endian format. + + Args: + data: bytes to be converted + signed: whether two's complement is used to represent the integer. 
+ + Returns: the converted data as int + """ + return int.from_bytes(data, byteorder='big', signed=signed) + + +def unpack(fmt: str, buf: bytes) -> Any: + size = struct.calcsize(fmt) + return struct.unpack(fmt, buf[:size]), buf[size:] + + +def unpack_len(n: int, buf: bytes) -> Tuple[bytes, bytes]: + return buf[:n], buf[n:] + + +def get_checksum(address_bytes: bytes) -> bytes: + """ Calculate double sha256 of address and gets first 4 bytes + + :param address_bytes: address before checksum + :param address_bytes: bytes + + :return: checksum of the address + :rtype: bytes + """ + return hashlib.sha256(hashlib.sha256(address_bytes).digest()).digest()[:4] + + +def decode_address(address58: str) -> bytes: + """ Decode address in base58 to bytes + + :param address58: Wallet address in base58 + :type address58: string + + :raises InvalidAddress: if address58 is not a valid base58 string or + not a valid address or has invalid checksum + + :return: Address in bytes + :rtype: bytes + """ + try: + decoded_address = base58.b58decode(address58) + except ValueError: + # Invalid base58 string + raise InvalidAddress('Invalid base58 address') + # Validate address size [25 bytes] + if len(decoded_address) != 25: + raise InvalidAddress('Address size must have 25 bytes') + # Validate the checksum + address_checksum = decoded_address[-4:] + valid_checksum = get_checksum(decoded_address[:-4]) + if address_checksum != valid_checksum: + raise InvalidAddress('Invalid checksum of address') + return decoded_address + + +def get_address_b58_from_public_key_hash(public_key_hash: bytes) -> str: + """Gets the b58 address from the hash of a public key. 
+ + :param public_key_hash: hash of public key (sha256 and ripemd160) + :param public_key_hash: bytes + + :return: address in base 58 + :rtype: string + """ + address = get_address_from_public_key_hash(public_key_hash) + return base58.b58encode(address).decode('utf-8') + + +def get_address_from_public_key_hash(public_key_hash: bytes, + version_byte: bytes = settings.P2PKH_VERSION_BYTE) -> bytes: + """Gets the address in bytes from the public key hash + + :param public_key_hash: hash of public key (sha256 and ripemd160) + :param public_key_hash: bytes + + :param version_byte: first byte of address to define the version of this address + :param version_byte: bytes + + :return: address in bytes + :rtype: bytes + """ + address = b'' + # Version byte + address += version_byte + # Pubkey hash + address += public_key_hash + checksum = get_checksum(address) + address += checksum + return address + + +def get_address_b58_from_redeem_script_hash(redeem_script_hash: bytes, + version_byte: bytes = settings.MULTISIG_VERSION_BYTE) -> str: + """Gets the b58 address from the hash of the redeem script in multisig. 
+ + :param redeem_script_hash: hash of the redeem script (sha256 and ripemd160) + :param redeem_script_hash: bytes + + :return: address in base 58 + :rtype: string + """ + address = get_address_from_redeem_script_hash(redeem_script_hash, version_byte) + return base58.b58encode(address).decode('utf-8') + + +def get_address_from_redeem_script_hash(redeem_script_hash: bytes, + version_byte: bytes = settings.MULTISIG_VERSION_BYTE) -> bytes: + """Gets the address in bytes from the redeem script hash + + :param redeem_script_hash: hash of redeem script (sha256 and ripemd160) + :param redeem_script_hash: bytes + + :param version_byte: first byte of address to define the version of this address + :param version_byte: bytes + + :return: address in bytes + :rtype: bytes + """ + address = b'' + # Version byte + address += version_byte + # redeem script hash + address += redeem_script_hash + checksum = get_checksum(address) + address += checksum + return address + + +def clean_token_string(string: str) -> str: + """ Receives the token name/symbol and returns it after some cleanups. + It sets to uppercase, removes double spaces and spaces at the beginning and end. 
+ """ + return re.sub(r'\s\s+', ' ', string).strip().upper() + + +def get_public_key_from_bytes_compressed(public_key_bytes: bytes) -> ec.EllipticCurvePublicKey: + """Return the cryptography public key from the compressed bytes format.""" + return ec.EllipticCurvePublicKey.from_encoded_point(ec.SECP256K1(), public_key_bytes) + + +def get_address_b58_from_public_key(public_key: ec.EllipticCurvePublicKey) -> str: + """Get the b58 address from a public key.""" + public_key_bytes = get_public_key_bytes_compressed(public_key) + return get_address_b58_from_public_key_bytes(public_key_bytes) + + +def get_address_b58_from_public_key_bytes(public_key_bytes: bytes) -> str: + """Get the b58 address from a public key bytes.""" + public_key_hash = get_hash160(public_key_bytes) + return get_address_b58_from_public_key_hash(public_key_hash) + + +def get_public_key_bytes_compressed(public_key: ec.EllipticCurvePublicKey) -> bytes: + """Return the bytes of a pubkey in the compressed format.""" + return public_key.public_bytes(Encoding.X962, PublicFormat.CompressedPoint) + + +try: + hashlib.new('ripemd160', b'') +except Exception: + # XXX: the source says "Test-only pure Python RIPEMD160 implementation", however for our case this is acceptable + # for more details see: https://github.com/bitcoin/bitcoin/pull/23716/files which has a copy of the same code + import pycoin.contrib.ripemd160 # type: ignore[import-untyped] + + def get_hash160(public_key_bytes: bytes) -> bytes: + """The input is hashed twice: first with SHA-256 and then with RIPEMD-160""" + key_hash = hashlib.sha256(public_key_bytes) + return cast(bytes, pycoin.contrib.ripemd160.ripemd160(key_hash.digest())) +else: + def get_hash160(public_key_bytes: bytes) -> bytes: + """The input is hashed twice: first with SHA-256 and then with RIPEMD-160""" + key_hash = hashlib.sha256(public_key_bytes) + h = hashlib.new('ripemd160') + h.update(key_hash.digest()) + return h.digest() + + +def encode_signed(value: int, *, max_bytes: 
Union[int, None] = None) -> bytes: + """ + Receive a signed integer and return its LEB128-encoded bytes. + + >>> encode_signed(0) == bytes([0x00]) + True + >>> encode_signed(624485) == bytes([0xE5, 0x8E, 0x26]) + True + >>> encode_signed(-123456) == bytes([0xC0, 0xBB, 0x78]) + True + """ + serializer: Serializer = Serializer.build_bytes_serializer() + try: + encode_leb128(serializer.with_optional_max_bytes(max_bytes), value, signed=True) + except MaxBytesExceededError as e: + raise ValueError(f'cannot encode more than {max_bytes} bytes') from e + except SerializationError as e: + raise ValueError('serialization error') from e + return bytes(serializer.finalize()) + + +def encode_unsigned(value: int, *, max_bytes: Union[int, None] = None) -> bytes: + """ + Receive an unsigned integer and return its LEB128-encoded bytes. + + >>> encode_unsigned(0) == bytes([0x00]) + True + >>> encode_unsigned(624485) == bytes([0xE5, 0x8E, 0x26]) + True + """ + serializer: Serializer = Serializer.build_bytes_serializer() + try: + encode_leb128(serializer.with_optional_max_bytes(max_bytes), value, signed=False) + except MaxBytesExceededError as e: + raise ValueError(f'cannot encode more than {max_bytes} bytes') from e + except SerializationError as e: + raise ValueError('serialization error') from e + return bytes(serializer.finalize()) + + +def decode_signed(data: bytes, *, max_bytes: Union[int, None] = None) -> tuple[int, bytes]: + """ + Receive and consume a buffer returning a tuple of the unpacked + LEB128-encoded signed integer and the reamining buffer. + + >>> decode_signed(bytes([0x00]) + b'test') + (0, b'test') + >>> decode_signed(bytes([0xE5, 0x8E, 0x26]) + b'test') + (624485, b'test') + >>> decode_signed(bytes([0xC0, 0xBB, 0x78]) + b'test') + (-123456, b'test') + >>> decode_signed(bytes([0xC0, 0xBB, 0x78]) + b'test', max_bytes=3) + (-123456, b'test') + >>> try: + ... decode_signed(bytes([0xC0, 0xBB, 0x78]) + b'test', max_bytes=2) + ... except ValueError as e: + ... 
print(e) + cannot decode more than 2 bytes + """ + deserializer = Deserializer.build_bytes_deserializer(data) + try: + value = decode_leb128(deserializer.with_optional_max_bytes(max_bytes), signed=True) + except MaxBytesExceededError as e: + raise ValueError(f'cannot decode more than {max_bytes} bytes') from e + except SerializationError as e: + raise ValueError('deserialization error') from e + remaining_data = bytes(deserializer.read_all()) + deserializer.finalize() + return (value, remaining_data) + + +def decode_unsigned(data: bytes, *, max_bytes: Union[int, None] = None) -> tuple[int, bytes]: + """ + Receive and consume a buffer returning a tuple of the unpacked + LEB128-encoded unsigned integer and the reamining buffer. + + >>> decode_unsigned(bytes([0x00]) + b'test') + (0, b'test') + >>> decode_unsigned(bytes([0xE5, 0x8E, 0x26]) + b'test') + (624485, b'test') + >>> decode_unsigned(bytes([0xE5, 0x8E, 0x26]) + b'test', max_bytes=3) + (624485, b'test') + >>> try: + ... decode_unsigned(bytes([0xE5, 0x8E, 0x26]) + b'test', max_bytes=2) + ... except ValueError as e: + ... print(e) + cannot decode more than 2 bytes + """ + deserializer = Deserializer.build_bytes_deserializer(data) + try: + value = decode_leb128(deserializer.with_optional_max_bytes(max_bytes), signed=False) + except MaxBytesExceededError as e: + raise ValueError(f'cannot decode more than {max_bytes} bytes') from e + except SerializationError as e: + raise ValueError('deserialization error') from e + remaining_data = bytes(deserializer.read_all()) + deserializer.finalize() + return (value, remaining_data) diff --git a/hathorlib/hathorlib/vertex_parser.py b/hathorlib/hathorlib/vertex_parser.py new file mode 100644 index 000000000..ccf8152cb --- /dev/null +++ b/hathorlib/hathorlib/vertex_parser.py @@ -0,0 +1,43 @@ +# Copyright 2024 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from hathorlib.headers import VertexBaseHeader, VertexHeaderId


class VertexParser:
    __slots__ = ()

    @staticmethod
    def get_supported_headers() -> dict[VertexHeaderId, type[VertexBaseHeader]]:
        """Return the mapping from header id to the header class that parses it."""
        from hathorlib.headers import FeeHeader, NanoHeader, VertexHeaderId
        supported: dict[VertexHeaderId, type[VertexBaseHeader]] = {
            VertexHeaderId.NANO_HEADER: NanoHeader,
            VertexHeaderId.FEE_HEADER: FeeHeader,
        }
        return supported

    @staticmethod
    def get_header_parser(header_id_bytes: bytes) -> type[VertexBaseHeader]:
        """Return the parser class for the given raw header id bytes.

        Raises ValueError when the header id is unknown or unsupported.
        """
        from hathorlib.headers import VertexHeaderId
        # VertexHeaderId(...) itself raises ValueError for unrecognized bytes.
        wanted = VertexHeaderId(header_id_bytes)
        parsers = VertexParser.get_supported_headers()
        if wanted not in parsers:
            raise ValueError(f'Header type not supported: {header_id_bytes!r}')
        return parsers[wanted]
# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand.
+ +[[package]] +name = "aiohttp" +version = "3.9.5" +description = "Async http client/server framework (asyncio)" +optional = true +python-versions = ">=3.8" +files = [ + {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fcde4c397f673fdec23e6b05ebf8d4751314fa7c24f93334bf1f1364c1c69ac7"}, + {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d6b3f1fabe465e819aed2c421a6743d8debbde79b6a8600739300630a01bf2c"}, + {file = "aiohttp-3.9.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ae79c1bc12c34082d92bf9422764f799aee4746fd7a392db46b7fd357d4a17a"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d3ebb9e1316ec74277d19c5f482f98cc65a73ccd5430540d6d11682cd857430"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84dabd95154f43a2ea80deffec9cb44d2e301e38a0c9d331cc4aa0166fe28ae3"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8a02fbeca6f63cb1f0475c799679057fc9268b77075ab7cf3f1c600e81dd46b"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c26959ca7b75ff768e2776d8055bf9582a6267e24556bb7f7bd29e677932be72"}, + {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:714d4e5231fed4ba2762ed489b4aec07b2b9953cf4ee31e9871caac895a839c0"}, + {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7a6a8354f1b62e15d48e04350f13e726fa08b62c3d7b8401c0a1314f02e3558"}, + {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c413016880e03e69d166efb5a1a95d40f83d5a3a648d16486592c49ffb76d0db"}, + {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ff84aeb864e0fac81f676be9f4685f0527b660f1efdc40dcede3c251ef1e867f"}, + {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = 
"sha256:ad7f2919d7dac062f24d6f5fe95d401597fbb015a25771f85e692d043c9d7832"}, + {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:702e2c7c187c1a498a4e2b03155d52658fdd6fda882d3d7fbb891a5cf108bb10"}, + {file = "aiohttp-3.9.5-cp310-cp310-win32.whl", hash = "sha256:67c3119f5ddc7261d47163ed86d760ddf0e625cd6246b4ed852e82159617b5fb"}, + {file = "aiohttp-3.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:471f0ef53ccedec9995287f02caf0c068732f026455f07db3f01a46e49d76bbb"}, + {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e0ae53e33ee7476dd3d1132f932eeb39bf6125083820049d06edcdca4381f342"}, + {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c088c4d70d21f8ca5c0b8b5403fe84a7bc8e024161febdd4ef04575ef35d474d"}, + {file = "aiohttp-3.9.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:639d0042b7670222f33b0028de6b4e2fad6451462ce7df2af8aee37dcac55424"}, + {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f26383adb94da5e7fb388d441bf09c61e5e35f455a3217bfd790c6b6bc64b2ee"}, + {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:66331d00fb28dc90aa606d9a54304af76b335ae204d1836f65797d6fe27f1ca2"}, + {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ff550491f5492ab5ed3533e76b8567f4b37bd2995e780a1f46bca2024223233"}, + {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f22eb3a6c1080d862befa0a89c380b4dafce29dc6cd56083f630073d102eb595"}, + {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a81b1143d42b66ffc40a441379387076243ef7b51019204fd3ec36b9f69e77d6"}, + {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f64fd07515dad67f24b6ea4a66ae2876c01031de91c93075b8093f07c0a2d93d"}, + {file = 
"aiohttp-3.9.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:93e22add827447d2e26d67c9ac0161756007f152fdc5210277d00a85f6c92323"}, + {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:55b39c8684a46e56ef8c8d24faf02de4a2b2ac60d26cee93bc595651ff545de9"}, + {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4715a9b778f4293b9f8ae7a0a7cef9829f02ff8d6277a39d7f40565c737d3771"}, + {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:afc52b8d969eff14e069a710057d15ab9ac17cd4b6753042c407dcea0e40bf75"}, + {file = "aiohttp-3.9.5-cp311-cp311-win32.whl", hash = "sha256:b3df71da99c98534be076196791adca8819761f0bf6e08e07fd7da25127150d6"}, + {file = "aiohttp-3.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:88e311d98cc0bf45b62fc46c66753a83445f5ab20038bcc1b8a1cc05666f428a"}, + {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c7a4b7a6cf5b6eb11e109a9755fd4fda7d57395f8c575e166d363b9fc3ec4678"}, + {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0a158704edf0abcac8ac371fbb54044f3270bdbc93e254a82b6c82be1ef08f3c"}, + {file = "aiohttp-3.9.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d153f652a687a8e95ad367a86a61e8d53d528b0530ef382ec5aaf533140ed00f"}, + {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82a6a97d9771cb48ae16979c3a3a9a18b600a8505b1115cfe354dfb2054468b4"}, + {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60cdbd56f4cad9f69c35eaac0fbbdf1f77b0ff9456cebd4902f3dd1cf096464c"}, + {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8676e8fd73141ded15ea586de0b7cda1542960a7b9ad89b2b06428e97125d4fa"}, + {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da00da442a0e31f1c69d26d224e1efd3a1ca5bcbf210978a2ca7426dfcae9f58"}, + {file = 
"aiohttp-3.9.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18f634d540dd099c262e9f887c8bbacc959847cfe5da7a0e2e1cf3f14dbf2daf"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:320e8618eda64e19d11bdb3bd04ccc0a816c17eaecb7e4945d01deee2a22f95f"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:2faa61a904b83142747fc6a6d7ad8fccff898c849123030f8e75d5d967fd4a81"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:8c64a6dc3fe5db7b1b4d2b5cb84c4f677768bdc340611eca673afb7cf416ef5a"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:393c7aba2b55559ef7ab791c94b44f7482a07bf7640d17b341b79081f5e5cd1a"}, + {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c671dc117c2c21a1ca10c116cfcd6e3e44da7fcde37bf83b2be485ab377b25da"}, + {file = "aiohttp-3.9.5-cp312-cp312-win32.whl", hash = "sha256:5a7ee16aab26e76add4afc45e8f8206c95d1d75540f1039b84a03c3b3800dd59"}, + {file = "aiohttp-3.9.5-cp312-cp312-win_amd64.whl", hash = "sha256:5ca51eadbd67045396bc92a4345d1790b7301c14d1848feaac1d6a6c9289e888"}, + {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:694d828b5c41255e54bc2dddb51a9f5150b4eefa9886e38b52605a05d96566e8"}, + {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0605cc2c0088fcaae79f01c913a38611ad09ba68ff482402d3410bf59039bfb8"}, + {file = "aiohttp-3.9.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4558e5012ee03d2638c681e156461d37b7a113fe13970d438d95d10173d25f78"}, + {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dbc053ac75ccc63dc3a3cc547b98c7258ec35a215a92bd9f983e0aac95d3d5b"}, + {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4109adee842b90671f1b689901b948f347325045c15f46b39797ae1bf17019de"}, + {file = 
"aiohttp-3.9.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6ea1a5b409a85477fd8e5ee6ad8f0e40bf2844c270955e09360418cfd09abac"}, + {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3c2890ca8c59ee683fd09adf32321a40fe1cf164e3387799efb2acebf090c11"}, + {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3916c8692dbd9d55c523374a3b8213e628424d19116ac4308e434dbf6d95bbdd"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8d1964eb7617907c792ca00b341b5ec3e01ae8c280825deadbbd678447b127e1"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d5ab8e1f6bee051a4bf6195e38a5c13e5e161cb7bad83d8854524798bd9fcd6e"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:52c27110f3862a1afbcb2af4281fc9fdc40327fa286c4625dfee247c3ba90156"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:7f64cbd44443e80094309875d4f9c71d0401e966d191c3d469cde4642bc2e031"}, + {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8b4f72fbb66279624bfe83fd5eb6aea0022dad8eec62b71e7bf63ee1caadeafe"}, + {file = "aiohttp-3.9.5-cp38-cp38-win32.whl", hash = "sha256:6380c039ec52866c06d69b5c7aad5478b24ed11696f0e72f6b807cfb261453da"}, + {file = "aiohttp-3.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:da22dab31d7180f8c3ac7c7635f3bcd53808f374f6aa333fe0b0b9e14b01f91a"}, + {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1732102949ff6087589408d76cd6dea656b93c896b011ecafff418c9661dc4ed"}, + {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c6021d296318cb6f9414b48e6a439a7f5d1f665464da507e8ff640848ee2a58a"}, + {file = "aiohttp-3.9.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:239f975589a944eeb1bad26b8b140a59a3a320067fb3cd10b75c3092405a1372"}, + {file = 
"aiohttp-3.9.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b7b30258348082826d274504fbc7c849959f1989d86c29bc355107accec6cfb"}, + {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd2adf5c87ff6d8b277814a28a535b59e20bfea40a101db6b3bdca7e9926bc24"}, + {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9a3d838441bebcf5cf442700e3963f58b5c33f015341f9ea86dcd7d503c07e2"}, + {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e3a1ae66e3d0c17cf65c08968a5ee3180c5a95920ec2731f53343fac9bad106"}, + {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c69e77370cce2d6df5d12b4e12bdcca60c47ba13d1cbbc8645dd005a20b738b"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf56238f4bbf49dab8c2dc2e6b1b68502b1e88d335bea59b3f5b9f4c001475"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d1469f228cd9ffddd396d9948b8c9cd8022b6d1bf1e40c6f25b0fb90b4f893ed"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:45731330e754f5811c314901cebdf19dd776a44b31927fa4b4dbecab9e457b0c"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3fcb4046d2904378e3aeea1df51f697b0467f2aac55d232c87ba162709478c46"}, + {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8cf142aa6c1a751fcb364158fd710b8a9be874b81889c2bd13aa8893197455e2"}, + {file = "aiohttp-3.9.5-cp39-cp39-win32.whl", hash = "sha256:7b179eea70833c8dee51ec42f3b4097bd6370892fa93f510f76762105568cf09"}, + {file = "aiohttp-3.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:38d80498e2e169bc61418ff36170e0aad0cd268da8b38a17c4cf29d254a8b3f1"}, + {file = "aiohttp-3.9.5.tar.gz", hash = "sha256:edea7d15772ceeb29db4aff55e482d4bcfb6ae160ce144f2682de02f6d693551"}, +] + +[package.dependencies] 
+aiosignal = ">=1.1.2" +async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""} +attrs = ">=17.3.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +yarl = ">=1.0,<2.0" + +[package.extras] +speedups = ["Brotli", "aiodns", "brotlicffi"] + +[[package]] +name = "aiosignal" +version = "1.4.0" +description = "aiosignal: a list of registered asynchronous callbacks" +optional = true +python-versions = ">=3.9" +files = [ + {file = "aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e"}, + {file = "aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7"}, +] + +[package.dependencies] +frozenlist = ">=1.1.0" +typing-extensions = {version = ">=4.2", markers = "python_version < \"3.13\""} + +[[package]] +name = "async-timeout" +version = "4.0.3" +description = "Timeout context manager for asyncio programs" +optional = true +python-versions = ">=3.7" +files = [ + {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, + {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, +] + +[[package]] +name = "attrs" +version = "25.4.0" +description = "Classes Without Boilerplate" +optional = true +python-versions = ">=3.9" +files = [ + {file = "attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373"}, + {file = "attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11"}, +] + +[[package]] +name = "base58" +version = "2.1.1" +description = "Base58 and Base58Check implementation." 
+optional = false +python-versions = ">=3.5" +files = [ + {file = "base58-2.1.1-py3-none-any.whl", hash = "sha256:11a36f4d3ce51dfc1043f3218591ac4eb1ceb172919cebe05b52a5bcc8d245c2"}, + {file = "base58-2.1.1.tar.gz", hash = "sha256:c5d0cb3f5b6e81e8e35da5754388ddcc6d0d14b6c6a132cb93d69ed580a7278c"}, +] + +[package.extras] +tests = ["PyHamcrest (>=2.0.2)", "mypy", "pytest (>=4.6)", "pytest-benchmark", "pytest-cov", "pytest-flake8"] + +[[package]] +name = "cffi" +version = "2.0.0" +description = "Foreign Function Interface for Python calling C code." +optional = false +python-versions = ">=3.9" +files = [ + {file = "cffi-2.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:0cf2d91ecc3fcc0625c2c530fe004f82c110405f101548512cce44322fa8ac44"}, + {file = "cffi-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f73b96c41e3b2adedc34a7356e64c8eb96e03a3782b535e043a986276ce12a49"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:53f77cbe57044e88bbd5ed26ac1d0514d2acf0591dd6bb02a3ae37f76811b80c"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3e837e369566884707ddaf85fc1744b47575005c0a229de3327f8f9a20f4efeb"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:5eda85d6d1879e692d546a078b44251cdd08dd1cfb98dfb77b670c97cee49ea0"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9332088d75dc3241c702d852d4671613136d90fa6881da7d770a483fd05248b4"}, + {file = "cffi-2.0.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fc7de24befaeae77ba923797c7c87834c73648a05a4bde34b3b7e5588973a453"}, + {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf364028c016c03078a23b503f02058f1814320a56ad535686f90565636a9495"}, + {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_i686.whl", hash = 
"sha256:e11e82b744887154b182fd3e7e8512418446501191994dbf9c9fc1f32cc8efd5"}, + {file = "cffi-2.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8ea985900c5c95ce9db1745f7933eeef5d314f0565b27625d9a10ec9881e1bfb"}, + {file = "cffi-2.0.0-cp310-cp310-win32.whl", hash = "sha256:1f72fb8906754ac8a2cc3f9f5aaa298070652a0ffae577e0ea9bd480dc3c931a"}, + {file = "cffi-2.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:b18a3ed7d5b3bd8d9ef7a8cb226502c6bf8308df1525e1cc676c3680e7176739"}, + {file = "cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe"}, + {file = "cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664"}, + {file = "cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26"}, + {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9"}, + {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414"}, + {file = "cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743"}, + {file = "cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5"}, + {file = "cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5"}, + {file = "cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d"}, + {file = "cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d"}, + {file = "cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037"}, + {file = "cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba"}, + {file = "cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94"}, + {file = "cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187"}, + {file = "cffi-2.0.0-cp312-cp312-win32.whl", hash = 
"sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18"}, + {file = "cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5"}, + {file = "cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6"}, + {file = "cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb"}, + {file = "cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3"}, + {file = "cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26"}, + {file = "cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c"}, + {file = "cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b"}, + {file = "cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27"}, + {file = "cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = 
"sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75"}, + {file = "cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91"}, + {file = "cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5"}, + {file = "cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13"}, + {file = "cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b"}, + {file = "cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c"}, + {file = "cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef"}, + {file = "cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775"}, + {file = "cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205"}, + {file = "cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1"}, + {file = "cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f"}, + {file = "cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25"}, + {file = "cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad"}, + {file = "cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9"}, + {file = 
"cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d"}, + {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c"}, + {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8"}, + {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc"}, + {file = "cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592"}, + {file = "cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512"}, + {file = "cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4"}, + {file = "cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e"}, + {file = "cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6"}, + {file = "cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9"}, + {file = "cffi-2.0.0-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:fe562eb1a64e67dd297ccc4f5addea2501664954f2692b69a76449ec7913ecbf"}, + {file = "cffi-2.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:de8dad4425a6ca6e4e5e297b27b5c824ecc7581910bf9aee86cb6835e6812aa7"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:4647afc2f90d1ddd33441e5b0e85b16b12ddec4fca55f0d9671fef036ecca27c"}, + {file = 
"cffi-2.0.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3f4d46d8b35698056ec29bca21546e1551a205058ae1a181d871e278b0b28165"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:e6e73b9e02893c764e7e8d5bb5ce277f1a009cd5243f8228f75f842bf937c534"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:cb527a79772e5ef98fb1d700678fe031e353e765d1ca2d409c92263c6d43e09f"}, + {file = "cffi-2.0.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:61d028e90346df14fedc3d1e5441df818d095f3b87d286825dfcbd6459b7ef63"}, + {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0f6084a0ea23d05d20c3edcda20c3d006f9b6f3fefeac38f59262e10cef47ee2"}, + {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1cd13c99ce269b3ed80b417dcd591415d3372bcac067009b6e0f59c7d4015e65"}, + {file = "cffi-2.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:89472c9762729b5ae1ad974b777416bfda4ac5642423fa93bd57a09204712322"}, + {file = "cffi-2.0.0-cp39-cp39-win32.whl", hash = "sha256:2081580ebb843f759b9f617314a24ed5738c51d2aee65d31e02f6f7a2b97707a"}, + {file = "cffi-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:b882b3df248017dba09d6b16defe9b5c407fe32fc7c65a9c69798e6175601be9"}, + {file = "cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529"}, +] + +[package.dependencies] +pycparser = {version = "*", markers = "implementation_name != \"PyPy\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "coverage" +version = "7.10.7" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "coverage-7.10.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:fc04cc7a3db33664e0c2d10eb8990ff6b3536f6842c9590ae8da4c614b9ed05a"}, + {file = "coverage-7.10.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e201e015644e207139f7e2351980feb7040e6f4b2c2978892f3e3789d1c125e5"}, + {file = "coverage-7.10.7-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:240af60539987ced2c399809bd34f7c78e8abe0736af91c3d7d0e795df633d17"}, + {file = "coverage-7.10.7-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8421e088bc051361b01c4b3a50fd39a4b9133079a2229978d9d30511fd05231b"}, + {file = "coverage-7.10.7-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6be8ed3039ae7f7ac5ce058c308484787c86e8437e72b30bf5e88b8ea10f3c87"}, + {file = "coverage-7.10.7-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e28299d9f2e889e6d51b1f043f58d5f997c373cc12e6403b90df95b8b047c13e"}, + {file = "coverage-7.10.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c4e16bd7761c5e454f4efd36f345286d6f7c5fa111623c355691e2755cae3b9e"}, + {file = "coverage-7.10.7-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b1c81d0e5e160651879755c9c675b974276f135558cf4ba79fee7b8413a515df"}, + {file = "coverage-7.10.7-cp310-cp310-musllinux_1_2_riscv64.whl", hash = 
"sha256:606cc265adc9aaedcc84f1f064f0e8736bc45814f15a357e30fca7ecc01504e0"}, + {file = "coverage-7.10.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:10b24412692df990dbc34f8fb1b6b13d236ace9dfdd68df5b28c2e39cafbba13"}, + {file = "coverage-7.10.7-cp310-cp310-win32.whl", hash = "sha256:b51dcd060f18c19290d9b8a9dd1e0181538df2ce0717f562fff6cf74d9fc0b5b"}, + {file = "coverage-7.10.7-cp310-cp310-win_amd64.whl", hash = "sha256:3a622ac801b17198020f09af3eaf45666b344a0d69fc2a6ffe2ea83aeef1d807"}, + {file = "coverage-7.10.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a609f9c93113be646f44c2a0256d6ea375ad047005d7f57a5c15f614dc1b2f59"}, + {file = "coverage-7.10.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:65646bb0359386e07639c367a22cf9b5bf6304e8630b565d0626e2bdf329227a"}, + {file = "coverage-7.10.7-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5f33166f0dfcce728191f520bd2692914ec70fac2713f6bf3ce59c3deacb4699"}, + {file = "coverage-7.10.7-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:35f5e3f9e455bb17831876048355dca0f758b6df22f49258cb5a91da23ef437d"}, + {file = "coverage-7.10.7-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4da86b6d62a496e908ac2898243920c7992499c1712ff7c2b6d837cc69d9467e"}, + {file = "coverage-7.10.7-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6b8b09c1fad947c84bbbc95eca841350fad9cbfa5a2d7ca88ac9f8d836c92e23"}, + {file = "coverage-7.10.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4376538f36b533b46f8971d3a3e63464f2c7905c9800db97361c43a2b14792ab"}, + {file = "coverage-7.10.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:121da30abb574f6ce6ae09840dae322bef734480ceafe410117627aa54f76d82"}, + {file = "coverage-7.10.7-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:88127d40df529336a9836870436fc2751c339fbaed3a836d42c93f3e4bd1d0a2"}, + {file = 
"coverage-7.10.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ba58bbcd1b72f136080c0bccc2400d66cc6115f3f906c499013d065ac33a4b61"}, + {file = "coverage-7.10.7-cp311-cp311-win32.whl", hash = "sha256:972b9e3a4094b053a4e46832b4bc829fc8a8d347160eb39d03f1690316a99c14"}, + {file = "coverage-7.10.7-cp311-cp311-win_amd64.whl", hash = "sha256:a7b55a944a7f43892e28ad4bc0561dfd5f0d73e605d1aa5c3c976b52aea121d2"}, + {file = "coverage-7.10.7-cp311-cp311-win_arm64.whl", hash = "sha256:736f227fb490f03c6488f9b6d45855f8e0fd749c007f9303ad30efab0e73c05a"}, + {file = "coverage-7.10.7-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7bb3b9ddb87ef7725056572368040c32775036472d5a033679d1fa6c8dc08417"}, + {file = "coverage-7.10.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:18afb24843cbc175687225cab1138c95d262337f5473512010e46831aa0c2973"}, + {file = "coverage-7.10.7-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:399a0b6347bcd3822be369392932884b8216d0944049ae22925631a9b3d4ba4c"}, + {file = "coverage-7.10.7-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:314f2c326ded3f4b09be11bc282eb2fc861184bc95748ae67b360ac962770be7"}, + {file = "coverage-7.10.7-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c41e71c9cfb854789dee6fc51e46743a6d138b1803fab6cb860af43265b42ea6"}, + {file = "coverage-7.10.7-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc01f57ca26269c2c706e838f6422e2a8788e41b3e3c65e2f41148212e57cd59"}, + {file = "coverage-7.10.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a6442c59a8ac8b85812ce33bc4d05bde3fb22321fa8294e2a5b487c3505f611b"}, + {file = "coverage-7.10.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:78a384e49f46b80fb4c901d52d92abe098e78768ed829c673fbb53c498bef73a"}, + {file = "coverage-7.10.7-cp312-cp312-musllinux_1_2_riscv64.whl", hash = 
"sha256:5e1e9802121405ede4b0133aa4340ad8186a1d2526de5b7c3eca519db7bb89fb"}, + {file = "coverage-7.10.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d41213ea25a86f69efd1575073d34ea11aabe075604ddf3d148ecfec9e1e96a1"}, + {file = "coverage-7.10.7-cp312-cp312-win32.whl", hash = "sha256:77eb4c747061a6af8d0f7bdb31f1e108d172762ef579166ec84542f711d90256"}, + {file = "coverage-7.10.7-cp312-cp312-win_amd64.whl", hash = "sha256:f51328ffe987aecf6d09f3cd9d979face89a617eacdaea43e7b3080777f647ba"}, + {file = "coverage-7.10.7-cp312-cp312-win_arm64.whl", hash = "sha256:bda5e34f8a75721c96085903c6f2197dc398c20ffd98df33f866a9c8fd95f4bf"}, + {file = "coverage-7.10.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:981a651f543f2854abd3b5fcb3263aac581b18209be49863ba575de6edf4c14d"}, + {file = "coverage-7.10.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:73ab1601f84dc804f7812dc297e93cd99381162da39c47040a827d4e8dafe63b"}, + {file = "coverage-7.10.7-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a8b6f03672aa6734e700bbcd65ff050fd19cddfec4b031cc8cf1c6967de5a68e"}, + {file = "coverage-7.10.7-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:10b6ba00ab1132a0ce4428ff68cf50a25efd6840a42cdf4239c9b99aad83be8b"}, + {file = "coverage-7.10.7-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c79124f70465a150e89340de5963f936ee97097d2ef76c869708c4248c63ca49"}, + {file = "coverage-7.10.7-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:69212fbccdbd5b0e39eac4067e20a4a5256609e209547d86f740d68ad4f04911"}, + {file = "coverage-7.10.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7ea7c6c9d0d286d04ed3541747e6597cbe4971f22648b68248f7ddcd329207f0"}, + {file = "coverage-7.10.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b9be91986841a75042b3e3243d0b3cb0b2434252b977baaf0cd56e960fe1e46f"}, + {file = 
"coverage-7.10.7-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:b281d5eca50189325cfe1f365fafade89b14b4a78d9b40b05ddd1fc7d2a10a9c"}, + {file = "coverage-7.10.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:99e4aa63097ab1118e75a848a28e40d68b08a5e19ce587891ab7fd04475e780f"}, + {file = "coverage-7.10.7-cp313-cp313-win32.whl", hash = "sha256:dc7c389dce432500273eaf48f410b37886be9208b2dd5710aaf7c57fd442c698"}, + {file = "coverage-7.10.7-cp313-cp313-win_amd64.whl", hash = "sha256:cac0fdca17b036af3881a9d2729a850b76553f3f716ccb0360ad4dbc06b3b843"}, + {file = "coverage-7.10.7-cp313-cp313-win_arm64.whl", hash = "sha256:4b6f236edf6e2f9ae8fcd1332da4e791c1b6ba0dc16a2dc94590ceccb482e546"}, + {file = "coverage-7.10.7-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a0ec07fd264d0745ee396b666d47cef20875f4ff2375d7c4f58235886cc1ef0c"}, + {file = "coverage-7.10.7-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:dd5e856ebb7bfb7672b0086846db5afb4567a7b9714b8a0ebafd211ec7ce6a15"}, + {file = "coverage-7.10.7-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f57b2a3c8353d3e04acf75b3fed57ba41f5c0646bbf1d10c7c282291c97936b4"}, + {file = "coverage-7.10.7-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:1ef2319dd15a0b009667301a3f84452a4dc6fddfd06b0c5c53ea472d3989fbf0"}, + {file = "coverage-7.10.7-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:83082a57783239717ceb0ad584de3c69cf581b2a95ed6bf81ea66034f00401c0"}, + {file = "coverage-7.10.7-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:50aa94fb1fb9a397eaa19c0d5ec15a5edd03a47bf1a3a6111a16b36e190cff65"}, + {file = "coverage-7.10.7-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:2120043f147bebb41c85b97ac45dd173595ff14f2a584f2963891cbcc3091541"}, + {file = "coverage-7.10.7-cp313-cp313t-musllinux_1_2_i686.whl", hash = 
"sha256:2fafd773231dd0378fdba66d339f84904a8e57a262f583530f4f156ab83863e6"}, + {file = "coverage-7.10.7-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:0b944ee8459f515f28b851728ad224fa2d068f1513ef6b7ff1efafeb2185f999"}, + {file = "coverage-7.10.7-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4b583b97ab2e3efe1b3e75248a9b333bd3f8b0b1b8e5b45578e05e5850dfb2c2"}, + {file = "coverage-7.10.7-cp313-cp313t-win32.whl", hash = "sha256:2a78cd46550081a7909b3329e2266204d584866e8d97b898cd7fb5ac8d888b1a"}, + {file = "coverage-7.10.7-cp313-cp313t-win_amd64.whl", hash = "sha256:33a5e6396ab684cb43dc7befa386258acb2d7fae7f67330ebb85ba4ea27938eb"}, + {file = "coverage-7.10.7-cp313-cp313t-win_arm64.whl", hash = "sha256:86b0e7308289ddde73d863b7683f596d8d21c7d8664ce1dee061d0bcf3fbb4bb"}, + {file = "coverage-7.10.7-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:b06f260b16ead11643a5a9f955bd4b5fd76c1a4c6796aeade8520095b75de520"}, + {file = "coverage-7.10.7-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:212f8f2e0612778f09c55dd4872cb1f64a1f2b074393d139278ce902064d5b32"}, + {file = "coverage-7.10.7-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3445258bcded7d4aa630ab8296dea4d3f15a255588dd535f980c193ab6b95f3f"}, + {file = "coverage-7.10.7-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bb45474711ba385c46a0bfe696c695a929ae69ac636cda8f532be9e8c93d720a"}, + {file = "coverage-7.10.7-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:813922f35bd800dca9994c5971883cbc0d291128a5de6b167c7aa697fcf59360"}, + {file = "coverage-7.10.7-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:93c1b03552081b2a4423091d6fb3787265b8f86af404cff98d1b5342713bdd69"}, + {file = "coverage-7.10.7-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:cc87dd1b6eaf0b848eebb1c86469b9f72a1891cb42ac7adcfbce75eadb13dd14"}, + {file = 
"coverage-7.10.7-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:39508ffda4f343c35f3236fe8d1a6634a51f4581226a1262769d7f970e73bffe"}, + {file = "coverage-7.10.7-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:925a1edf3d810537c5a3abe78ec5530160c5f9a26b1f4270b40e62cc79304a1e"}, + {file = "coverage-7.10.7-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2c8b9a0636f94c43cd3576811e05b89aa9bc2d0a85137affc544ae5cb0e4bfbd"}, + {file = "coverage-7.10.7-cp314-cp314-win32.whl", hash = "sha256:b7b8288eb7cdd268b0304632da8cb0bb93fadcfec2fe5712f7b9cc8f4d487be2"}, + {file = "coverage-7.10.7-cp314-cp314-win_amd64.whl", hash = "sha256:1ca6db7c8807fb9e755d0379ccc39017ce0a84dcd26d14b5a03b78563776f681"}, + {file = "coverage-7.10.7-cp314-cp314-win_arm64.whl", hash = "sha256:097c1591f5af4496226d5783d036bf6fd6cd0cbc132e071b33861de756efb880"}, + {file = "coverage-7.10.7-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:a62c6ef0d50e6de320c270ff91d9dd0a05e7250cac2a800b7784bae474506e63"}, + {file = "coverage-7.10.7-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:9fa6e4dd51fe15d8738708a973470f67a855ca50002294852e9571cdbd9433f2"}, + {file = "coverage-7.10.7-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:8fb190658865565c549b6b4706856d6a7b09302c797eb2cf8e7fe9dabb043f0d"}, + {file = "coverage-7.10.7-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:affef7c76a9ef259187ef31599a9260330e0335a3011732c4b9effa01e1cd6e0"}, + {file = "coverage-7.10.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6e16e07d85ca0cf8bafe5f5d23a0b850064e8e945d5677492b06bbe6f09cc699"}, + {file = "coverage-7.10.7-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:03ffc58aacdf65d2a82bbeb1ffe4d01ead4017a21bfd0454983b88ca73af94b9"}, + {file = "coverage-7.10.7-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = 
"sha256:1b4fd784344d4e52647fd7857b2af5b3fbe6c239b0b5fa63e94eb67320770e0f"}, + {file = "coverage-7.10.7-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:0ebbaddb2c19b71912c6f2518e791aa8b9f054985a0769bdb3a53ebbc765c6a1"}, + {file = "coverage-7.10.7-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:a2d9a3b260cc1d1dbdb1c582e63ddcf5363426a1a68faa0f5da28d8ee3c722a0"}, + {file = "coverage-7.10.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:a3cc8638b2480865eaa3926d192e64ce6c51e3d29c849e09d5b4ad95efae5399"}, + {file = "coverage-7.10.7-cp314-cp314t-win32.whl", hash = "sha256:67f8c5cbcd3deb7a60b3345dffc89a961a484ed0af1f6f73de91705cc6e31235"}, + {file = "coverage-7.10.7-cp314-cp314t-win_amd64.whl", hash = "sha256:e1ed71194ef6dea7ed2d5cb5f7243d4bcd334bfb63e59878519be558078f848d"}, + {file = "coverage-7.10.7-cp314-cp314t-win_arm64.whl", hash = "sha256:7fe650342addd8524ca63d77b2362b02345e5f1a093266787d210c70a50b471a"}, + {file = "coverage-7.10.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fff7b9c3f19957020cac546c70025331113d2e61537f6e2441bc7657913de7d3"}, + {file = "coverage-7.10.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:bc91b314cef27742da486d6839b677b3f2793dfe52b51bbbb7cf736d5c29281c"}, + {file = "coverage-7.10.7-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:567f5c155eda8df1d3d439d40a45a6a5f029b429b06648235f1e7e51b522b396"}, + {file = "coverage-7.10.7-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2af88deffcc8a4d5974cf2d502251bc3b2db8461f0b66d80a449c33757aa9f40"}, + {file = "coverage-7.10.7-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c7315339eae3b24c2d2fa1ed7d7a38654cba34a13ef19fbcb9425da46d3dc594"}, + {file = "coverage-7.10.7-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:912e6ebc7a6e4adfdbb1aec371ad04c68854cd3bf3608b3514e7ff9062931d8a"}, + {file = 
"coverage-7.10.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:f49a05acd3dfe1ce9715b657e28d138578bc40126760efb962322c56e9ca344b"}, + {file = "coverage-7.10.7-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:cce2109b6219f22ece99db7644b9622f54a4e915dad65660ec435e89a3ea7cc3"}, + {file = "coverage-7.10.7-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:f3c887f96407cea3916294046fc7dab611c2552beadbed4ea901cbc6a40cc7a0"}, + {file = "coverage-7.10.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:635adb9a4507c9fd2ed65f39693fa31c9a3ee3a8e6dc64df033e8fdf52a7003f"}, + {file = "coverage-7.10.7-cp39-cp39-win32.whl", hash = "sha256:5a02d5a850e2979b0a014c412573953995174743a3f7fa4ea5a6e9a3c5617431"}, + {file = "coverage-7.10.7-cp39-cp39-win_amd64.whl", hash = "sha256:c134869d5ffe34547d14e174c866fd8fe2254918cc0a95e99052903bc1543e07"}, + {file = "coverage-7.10.7-py3-none-any.whl", hash = "sha256:f7941f6f2fe6dd6807a1208737b8a0cbcf1cc6d7b07d24998ad2d63590868260"}, + {file = "coverage-7.10.7.tar.gz", hash = "sha256:f4ab143ab113be368a3e9b795f9cd7906c5ef407d6173fe9675a902e1fffc239"}, +] + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "cryptography" +version = "42.0.8" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e"}, + {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7"}, + {file = "cryptography-42.0.8-cp37-abi3-win32.whl", hash = "sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2"}, + {file = "cryptography-42.0.8-cp37-abi3-win_amd64.whl", hash = "sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba"}, + {file = "cryptography-42.0.8-cp39-abi3-macosx_10_12_universal2.whl", hash = 
"sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14"}, + {file = "cryptography-42.0.8-cp39-abi3-win32.whl", hash = "sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c"}, + {file = "cryptography-42.0.8-cp39-abi3-win_amd64.whl", hash = "sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c"}, + {file = 
"cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad"}, + {file = "cryptography-42.0.8.tar.gz", hash = "sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2"}, +] + +[package.dependencies] +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] +nox = ["nox"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] +sdist = ["build"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] + +[[package]] +name = "exceptiongroup" +version = "1.3.1" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.3.1-py3-none-any.whl", hash = "sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598"}, + {file = "exceptiongroup-1.3.1.tar.gz", hash = 
"sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.6.0", markers = "python_version < \"3.13\""} + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "flake8" +version = "7.0.0" +description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = ">=3.8.1" +files = [ + {file = "flake8-7.0.0-py2.py3-none-any.whl", hash = "sha256:a6dfbb75e03252917f2473ea9653f7cd799c3064e54d4c8140044c5c065f53c3"}, + {file = "flake8-7.0.0.tar.gz", hash = "sha256:33f96621059e65eec474169085dc92bf26e7b2d47366b70be2f67ab80dc25132"}, +] + +[package.dependencies] +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.11.0,<2.12.0" +pyflakes = ">=3.2.0,<3.3.0" + +[[package]] +name = "frozenlist" +version = "1.8.0" +description = "A list-like structure which implements collections.abc.MutableSequence" +optional = true +python-versions = ">=3.9" +files = [ + {file = "frozenlist-1.8.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b37f6d31b3dcea7deb5e9696e529a6aa4a898adc33db82da12e4c60a7c4d2011"}, + {file = "frozenlist-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef2b7b394f208233e471abc541cc6991f907ffd47dc72584acee3147899d6565"}, + {file = "frozenlist-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a88f062f072d1589b7b46e951698950e7da00442fc1cacbe17e19e025dc327ad"}, + {file = "frozenlist-1.8.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f57fb59d9f385710aa7060e89410aeb5058b99e62f4d16b08b91986b9a2140c2"}, + {file = "frozenlist-1.8.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:799345ab092bee59f01a915620b5d014698547afd011e691a208637312db9186"}, + {file = "frozenlist-1.8.0-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:c23c3ff005322a6e16f71bf8692fcf4d5a304aaafe1e262c98c6d4adc7be863e"}, + {file = "frozenlist-1.8.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8a76ea0f0b9dfa06f254ee06053d93a600865b3274358ca48a352ce4f0798450"}, + {file = "frozenlist-1.8.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c7366fe1418a6133d5aa824ee53d406550110984de7637d65a178010f759c6ef"}, + {file = "frozenlist-1.8.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:13d23a45c4cebade99340c4165bd90eeb4a56c6d8a9d8aa49568cac19a6d0dc4"}, + {file = "frozenlist-1.8.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:e4a3408834f65da56c83528fb52ce7911484f0d1eaf7b761fc66001db1646eff"}, + {file = "frozenlist-1.8.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:42145cd2748ca39f32801dad54aeea10039da6f86e303659db90db1c4b614c8c"}, + {file = "frozenlist-1.8.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:e2de870d16a7a53901e41b64ffdf26f2fbb8917b3e6ebf398098d72c5b20bd7f"}, + {file = "frozenlist-1.8.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:20e63c9493d33ee48536600d1a5c95eefc870cd71e7ab037763d1fbb89cc51e7"}, + {file = "frozenlist-1.8.0-cp310-cp310-win32.whl", hash = "sha256:adbeebaebae3526afc3c96fad434367cafbfd1b25d72369a9e5858453b1bb71a"}, + {file = "frozenlist-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:667c3777ca571e5dbeb76f331562ff98b957431df140b54c85fd4d52eea8d8f6"}, + {file = "frozenlist-1.8.0-cp310-cp310-win_arm64.whl", hash = "sha256:80f85f0a7cc86e7a54c46d99c9e1318ff01f4687c172ede30fd52d19d1da1c8e"}, + {file = "frozenlist-1.8.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:09474e9831bc2b2199fad6da3c14c7b0fbdd377cce9d3d77131be28906cb7d84"}, + {file = "frozenlist-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:17c883ab0ab67200b5f964d2b9ed6b00971917d5d8a92df149dc2c9779208ee9"}, + {file = "frozenlist-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:fa47e444b8ba08fffd1c18e8cdb9a75db1b6a27f17507522834ad13ed5922b93"}, + {file = "frozenlist-1.8.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2552f44204b744fba866e573be4c1f9048d6a324dfe14475103fd51613eb1d1f"}, + {file = "frozenlist-1.8.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:957e7c38f250991e48a9a73e6423db1bb9dd14e722a10f6b8bb8e16a0f55f695"}, + {file = "frozenlist-1.8.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8585e3bb2cdea02fc88ffa245069c36555557ad3609e83be0ec71f54fd4abb52"}, + {file = "frozenlist-1.8.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:edee74874ce20a373d62dc28b0b18b93f645633c2943fd90ee9d898550770581"}, + {file = "frozenlist-1.8.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c9a63152fe95756b85f31186bddf42e4c02c6321207fd6601a1c89ebac4fe567"}, + {file = "frozenlist-1.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b6db2185db9be0a04fecf2f241c70b63b1a242e2805be291855078f2b404dd6b"}, + {file = "frozenlist-1.8.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:f4be2e3d8bc8aabd566f8d5b8ba7ecc09249d74ba3c9ed52e54dc23a293f0b92"}, + {file = "frozenlist-1.8.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c8d1634419f39ea6f5c427ea2f90ca85126b54b50837f31497f3bf38266e853d"}, + {file = "frozenlist-1.8.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:1a7fa382a4a223773ed64242dbe1c9c326ec09457e6b8428efb4118c685c3dfd"}, + {file = "frozenlist-1.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:11847b53d722050808926e785df837353bd4d75f1d494377e59b23594d834967"}, + {file = "frozenlist-1.8.0-cp311-cp311-win32.whl", hash = "sha256:27c6e8077956cf73eadd514be8fb04d77fc946a7fe9f7fe167648b0b9085cc25"}, + {file = "frozenlist-1.8.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:ac913f8403b36a2c8610bbfd25b8013488533e71e62b4b4adce9c86c8cea905b"}, + {file = "frozenlist-1.8.0-cp311-cp311-win_arm64.whl", hash = "sha256:d4d3214a0f8394edfa3e303136d0575eece0745ff2b47bd2cb2e66dd92d4351a"}, + {file = "frozenlist-1.8.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78f7b9e5d6f2fdb88cdde9440dc147259b62b9d3b019924def9f6478be254ac1"}, + {file = "frozenlist-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:229bf37d2e4acdaf808fd3f06e854a4a7a3661e871b10dc1f8f1896a3b05f18b"}, + {file = "frozenlist-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f833670942247a14eafbb675458b4e61c82e002a148f49e68257b79296e865c4"}, + {file = "frozenlist-1.8.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:494a5952b1c597ba44e0e78113a7266e656b9794eec897b19ead706bd7074383"}, + {file = "frozenlist-1.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96f423a119f4777a4a056b66ce11527366a8bb92f54e541ade21f2374433f6d4"}, + {file = "frozenlist-1.8.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3462dd9475af2025c31cc61be6652dfa25cbfb56cbbf52f4ccfe029f38decaf8"}, + {file = "frozenlist-1.8.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4c800524c9cd9bac5166cd6f55285957fcfc907db323e193f2afcd4d9abd69b"}, + {file = "frozenlist-1.8.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d6a5df73acd3399d893dafc71663ad22534b5aa4f94e8a2fabfe856c3c1b6a52"}, + {file = "frozenlist-1.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:405e8fe955c2280ce66428b3ca55e12b3c4e9c336fb2103a4937e891c69a4a29"}, + {file = "frozenlist-1.8.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:908bd3f6439f2fef9e85031b59fd4f1297af54415fb60e4254a95f75b3cab3f3"}, + {file = 
"frozenlist-1.8.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:294e487f9ec720bd8ffcebc99d575f7eff3568a08a253d1ee1a0378754b74143"}, + {file = "frozenlist-1.8.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:74c51543498289c0c43656701be6b077f4b265868fa7f8a8859c197006efb608"}, + {file = "frozenlist-1.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:776f352e8329135506a1d6bf16ac3f87bc25b28e765949282dcc627af36123aa"}, + {file = "frozenlist-1.8.0-cp312-cp312-win32.whl", hash = "sha256:433403ae80709741ce34038da08511d4a77062aa924baf411ef73d1146e74faf"}, + {file = "frozenlist-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:34187385b08f866104f0c0617404c8eb08165ab1272e884abc89c112e9c00746"}, + {file = "frozenlist-1.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:fe3c58d2f5db5fbd18c2987cba06d51b0529f52bc3a6cdc33d3f4eab725104bd"}, + {file = "frozenlist-1.8.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8d92f1a84bb12d9e56f818b3a746f3efba93c1b63c8387a73dde655e1e42282a"}, + {file = "frozenlist-1.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:96153e77a591c8adc2ee805756c61f59fef4cf4073a9275ee86fe8cba41241f7"}, + {file = "frozenlist-1.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f21f00a91358803399890ab167098c131ec2ddd5f8f5fd5fe9c9f2c6fcd91e40"}, + {file = "frozenlist-1.8.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fb30f9626572a76dfe4293c7194a09fb1fe93ba94c7d4f720dfae3b646b45027"}, + {file = "frozenlist-1.8.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eaa352d7047a31d87dafcacbabe89df0aa506abb5b1b85a2fb91bc3faa02d822"}, + {file = "frozenlist-1.8.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:03ae967b4e297f58f8c774c7eabcce57fe3c2434817d4385c50661845a058121"}, + {file = "frozenlist-1.8.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", 
hash = "sha256:f6292f1de555ffcc675941d65fffffb0a5bcd992905015f85d0592201793e0e5"}, + {file = "frozenlist-1.8.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29548f9b5b5e3460ce7378144c3010363d8035cea44bc0bf02d57f5a685e084e"}, + {file = "frozenlist-1.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ec3cc8c5d4084591b4237c0a272cc4f50a5b03396a47d9caaf76f5d7b38a4f11"}, + {file = "frozenlist-1.8.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:517279f58009d0b1f2e7c1b130b377a349405da3f7621ed6bfae50b10adf20c1"}, + {file = "frozenlist-1.8.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:db1e72ede2d0d7ccb213f218df6a078a9c09a7de257c2fe8fcef16d5925230b1"}, + {file = "frozenlist-1.8.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b4dec9482a65c54a5044486847b8a66bf10c9cb4926d42927ec4e8fd5db7fed8"}, + {file = "frozenlist-1.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:21900c48ae04d13d416f0e1e0c4d81f7931f73a9dfa0b7a8746fb2fe7dd970ed"}, + {file = "frozenlist-1.8.0-cp313-cp313-win32.whl", hash = "sha256:8b7b94a067d1c504ee0b16def57ad5738701e4ba10cec90529f13fa03c833496"}, + {file = "frozenlist-1.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:878be833caa6a3821caf85eb39c5ba92d28e85df26d57afb06b35b2efd937231"}, + {file = "frozenlist-1.8.0-cp313-cp313-win_arm64.whl", hash = "sha256:44389d135b3ff43ba8cc89ff7f51f5a0bb6b63d829c8300f79a2fe4fe61bcc62"}, + {file = "frozenlist-1.8.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:e25ac20a2ef37e91c1b39938b591457666a0fa835c7783c3a8f33ea42870db94"}, + {file = "frozenlist-1.8.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07cdca25a91a4386d2e76ad992916a85038a9b97561bf7a3fd12d5d9ce31870c"}, + {file = "frozenlist-1.8.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4e0c11f2cc6717e0a741f84a527c52616140741cd812a50422f83dc31749fb52"}, + {file = "frozenlist-1.8.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", 
hash = "sha256:b3210649ee28062ea6099cfda39e147fa1bc039583c8ee4481cb7811e2448c51"}, + {file = "frozenlist-1.8.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:581ef5194c48035a7de2aefc72ac6539823bb71508189e5de01d60c9dcd5fa65"}, + {file = "frozenlist-1.8.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3ef2d026f16a2b1866e1d86fc4e1291e1ed8a387b2c333809419a2f8b3a77b82"}, + {file = "frozenlist-1.8.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5500ef82073f599ac84d888e3a8c1f77ac831183244bfd7f11eaa0289fb30714"}, + {file = "frozenlist-1.8.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:50066c3997d0091c411a66e710f4e11752251e6d2d73d70d8d5d4c76442a199d"}, + {file = "frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:5c1c8e78426e59b3f8005e9b19f6ff46e5845895adbde20ece9218319eca6506"}, + {file = "frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:eefdba20de0d938cec6a89bd4d70f346a03108a19b9df4248d3cf0d88f1b0f51"}, + {file = "frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:cf253e0e1c3ceb4aaff6df637ce033ff6535fb8c70a764a8f46aafd3d6ab798e"}, + {file = "frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:032efa2674356903cd0261c4317a561a6850f3ac864a63fc1583147fb05a79b0"}, + {file = "frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6da155091429aeba16851ecb10a9104a108bcd32f6c1642867eadaee401c1c41"}, + {file = "frozenlist-1.8.0-cp313-cp313t-win32.whl", hash = "sha256:0f96534f8bfebc1a394209427d0f8a63d343c9779cda6fc25e8e121b5fd8555b"}, + {file = "frozenlist-1.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5d63a068f978fc69421fb0e6eb91a9603187527c86b7cd3f534a5b77a592b888"}, + {file = "frozenlist-1.8.0-cp313-cp313t-win_arm64.whl", hash = 
"sha256:bf0a7e10b077bf5fb9380ad3ae8ce20ef919a6ad93b4552896419ac7e1d8e042"}, + {file = "frozenlist-1.8.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:cee686f1f4cadeb2136007ddedd0aaf928ab95216e7691c63e50a8ec066336d0"}, + {file = "frozenlist-1.8.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:119fb2a1bd47307e899c2fac7f28e85b9a543864df47aa7ec9d3c1b4545f096f"}, + {file = "frozenlist-1.8.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4970ece02dbc8c3a92fcc5228e36a3e933a01a999f7094ff7c23fbd2beeaa67c"}, + {file = "frozenlist-1.8.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:cba69cb73723c3f329622e34bdbf5ce1f80c21c290ff04256cff1cd3c2036ed2"}, + {file = "frozenlist-1.8.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:778a11b15673f6f1df23d9586f83c4846c471a8af693a22e066508b77d201ec8"}, + {file = "frozenlist-1.8.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0325024fe97f94c41c08872db482cf8ac4800d80e79222c6b0b7b162d5b13686"}, + {file = "frozenlist-1.8.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:97260ff46b207a82a7567b581ab4190bd4dfa09f4db8a8b49d1a958f6aa4940e"}, + {file = "frozenlist-1.8.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:54b2077180eb7f83dd52c40b2750d0a9f175e06a42e3213ce047219de902717a"}, + {file = "frozenlist-1.8.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2f05983daecab868a31e1da44462873306d3cbfd76d1f0b5b69c473d21dbb128"}, + {file = "frozenlist-1.8.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:33f48f51a446114bc5d251fb2954ab0164d5be02ad3382abcbfe07e2531d650f"}, + {file = "frozenlist-1.8.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:154e55ec0655291b5dd1b8731c637ecdb50975a2ae70c606d100750a540082f7"}, + {file = 
"frozenlist-1.8.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:4314debad13beb564b708b4a496020e5306c7333fa9a3ab90374169a20ffab30"}, + {file = "frozenlist-1.8.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:073f8bf8becba60aa931eb3bc420b217bb7d5b8f4750e6f8b3be7f3da85d38b7"}, + {file = "frozenlist-1.8.0-cp314-cp314-win32.whl", hash = "sha256:bac9c42ba2ac65ddc115d930c78d24ab8d4f465fd3fc473cdedfccadb9429806"}, + {file = "frozenlist-1.8.0-cp314-cp314-win_amd64.whl", hash = "sha256:3e0761f4d1a44f1d1a47996511752cf3dcec5bbdd9cc2b4fe595caf97754b7a0"}, + {file = "frozenlist-1.8.0-cp314-cp314-win_arm64.whl", hash = "sha256:d1eaff1d00c7751b7c6662e9c5ba6eb2c17a2306ba5e2a37f24ddf3cc953402b"}, + {file = "frozenlist-1.8.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:d3bb933317c52d7ea5004a1c442eef86f426886fba134ef8cf4226ea6ee1821d"}, + {file = "frozenlist-1.8.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:8009897cdef112072f93a0efdce29cd819e717fd2f649ee3016efd3cd885a7ed"}, + {file = "frozenlist-1.8.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2c5dcbbc55383e5883246d11fd179782a9d07a986c40f49abe89ddf865913930"}, + {file = "frozenlist-1.8.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:39ecbc32f1390387d2aa4f5a995e465e9e2f79ba3adcac92d68e3e0afae6657c"}, + {file = "frozenlist-1.8.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92db2bf818d5cc8d9c1f1fc56b897662e24ea5adb36ad1f1d82875bd64e03c24"}, + {file = "frozenlist-1.8.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2dc43a022e555de94c3b68a4ef0b11c4f747d12c024a520c7101709a2144fb37"}, + {file = "frozenlist-1.8.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:cb89a7f2de3602cfed448095bab3f178399646ab7c61454315089787df07733a"}, + {file = 
"frozenlist-1.8.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:33139dc858c580ea50e7e60a1b0ea003efa1fd42e6ec7fdbad78fff65fad2fd2"}, + {file = "frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:168c0969a329b416119507ba30b9ea13688fafffac1b7822802537569a1cb0ef"}, + {file = "frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:28bd570e8e189d7f7b001966435f9dac6718324b5be2990ac496cf1ea9ddb7fe"}, + {file = "frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:b2a095d45c5d46e5e79ba1e5b9cb787f541a8dee0433836cea4b96a2c439dcd8"}, + {file = "frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:eab8145831a0d56ec9c4139b6c3e594c7a83c2c8be25d5bcf2d86136a532287a"}, + {file = "frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:974b28cf63cc99dfb2188d8d222bc6843656188164848c4f679e63dae4b0708e"}, + {file = "frozenlist-1.8.0-cp314-cp314t-win32.whl", hash = "sha256:342c97bf697ac5480c0a7ec73cd700ecfa5a8a40ac923bd035484616efecc2df"}, + {file = "frozenlist-1.8.0-cp314-cp314t-win_amd64.whl", hash = "sha256:06be8f67f39c8b1dc671f5d83aaefd3358ae5cdcf8314552c57e7ed3e6475bdd"}, + {file = "frozenlist-1.8.0-cp314-cp314t-win_arm64.whl", hash = "sha256:102e6314ca4da683dca92e3b1355490fed5f313b768500084fbe6371fddfdb79"}, + {file = "frozenlist-1.8.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d8b7138e5cd0647e4523d6685b0eac5d4be9a184ae9634492f25c6eb38c12a47"}, + {file = "frozenlist-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a6483e309ca809f1efd154b4d37dc6d9f61037d6c6a81c2dc7a15cb22c8c5dca"}, + {file = "frozenlist-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1b9290cf81e95e93fdf90548ce9d3c1211cf574b8e3f4b3b7cb0537cf2227068"}, + {file = "frozenlist-1.8.0-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:59a6a5876ca59d1b63af8cd5e7ffffb024c3dc1e9cf9301b21a2e76286505c95"}, + {file = 
"frozenlist-1.8.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6dc4126390929823e2d2d9dc79ab4046ed74680360fc5f38b585c12c66cdf459"}, + {file = "frozenlist-1.8.0-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:332db6b2563333c5671fecacd085141b5800cb866be16d5e3eb15a2086476675"}, + {file = "frozenlist-1.8.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9ff15928d62a0b80bb875655c39bf517938c7d589554cbd2669be42d97c2cb61"}, + {file = "frozenlist-1.8.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7bf6cdf8e07c8151fba6fe85735441240ec7f619f935a5205953d58009aef8c6"}, + {file = "frozenlist-1.8.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:48e6d3f4ec5c7273dfe83ff27c91083c6c9065af655dc2684d2c200c94308bb5"}, + {file = "frozenlist-1.8.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:1a7607e17ad33361677adcd1443edf6f5da0ce5e5377b798fba20fae194825f3"}, + {file = "frozenlist-1.8.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:5a3a935c3a4e89c733303a2d5a7c257ea44af3a56c8202df486b7f5de40f37e1"}, + {file = "frozenlist-1.8.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:940d4a017dbfed9daf46a3b086e1d2167e7012ee297fef9e1c545c4d022f5178"}, + {file = "frozenlist-1.8.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b9be22a69a014bc47e78072d0ecae716f5eb56c15238acca0f43d6eb8e4a5bda"}, + {file = "frozenlist-1.8.0-cp39-cp39-win32.whl", hash = "sha256:1aa77cb5697069af47472e39612976ed05343ff2e84a3dcf15437b232cbfd087"}, + {file = "frozenlist-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:7398c222d1d405e796970320036b1b563892b65809d9e5261487bb2c7f7b5c6a"}, + {file = "frozenlist-1.8.0-cp39-cp39-win_arm64.whl", hash = "sha256:b4f3b365f31c6cd4af24545ca0a244a53688cad8834e32f56831c4923b50a103"}, + {file = "frozenlist-1.8.0-py3-none-any.whl", hash = 
"sha256:0c18a16eab41e82c295618a77502e17b195883241c563b00f0aa5106fc4eaa0d"}, + {file = "frozenlist-1.8.0.tar.gz", hash = "sha256:3ede829ed8d842f6cd48fc7081d7a41001a56f1f38603f9d49bf3020d59a31ad"}, +] + +[[package]] +name = "idna" +version = "3.11" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = true +python-versions = ">=3.8" +files = [ + {file = "idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea"}, + {file = "idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + +[[package]] +name = "iniconfig" +version = "2.1.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.8" +files = [ + {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, + {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, +] + +[[package]] +name = "isort" +version = "5.13.2" +description = "A Python utility / library to sort Python imports." 
+optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, +] + +[package.dependencies] +colorama = {version = ">=0.4.6", optional = true, markers = "extra == \"colors\""} + +[package.extras] +colors = ["colorama (>=0.4.6)"] + +[[package]] +name = "librt" +version = "0.7.5" +description = "Mypyc runtime library" +optional = false +python-versions = ">=3.9" +files = [ + {file = "librt-0.7.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:81056e01bba1394f1d92904ec61a4078f66df785316275edbaf51d90da8c6e26"}, + {file = "librt-0.7.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d7c72c8756eeb3aefb1b9e3dac7c37a4a25db63640cac0ab6fc18e91a0edf05a"}, + {file = "librt-0.7.5-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:ddc4a16207f88f9597b397fc1f60781266d13b13de922ff61c206547a29e4bbd"}, + {file = "librt-0.7.5-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:63055d3dda433ebb314c9f1819942f16a19203c454508fdb2d167613f7017169"}, + {file = "librt-0.7.5-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9f85f9b5db87b0f52e53c68ad2a0c5a53e00afa439bd54a1723742a2b1021276"}, + {file = "librt-0.7.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c566a4672564c5d54d8ab65cdaae5a87ee14c1564c1a2ddc7a9f5811c750f023"}, + {file = "librt-0.7.5-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fee15c2a190ef389f14928135c6fb2d25cd3fdb7887bfd9a7b444bbdc8c06b96"}, + {file = "librt-0.7.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:584cb3e605ec45ba350962cec853e17be0a25a772f21f09f1e422f7044ae2a7d"}, + {file = "librt-0.7.5-cp310-cp310-win32.whl", hash = 
"sha256:9c08527055fbb03c641c15bbc5b79dd2942fb6a3bd8dabf141dd7e97eeea4904"}, + {file = "librt-0.7.5-cp310-cp310-win_amd64.whl", hash = "sha256:dd810f2d39c526c42ea205e0addad5dc08ef853c625387806a29d07f9d150d9b"}, + {file = "librt-0.7.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f952e1a78c480edee8fb43aa2bf2e84dcd46c917d44f8065b883079d3893e8fc"}, + {file = "librt-0.7.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75965c1f4efb7234ff52a58b729d245a21e87e4b6a26a0ec08052f02b16274e4"}, + {file = "librt-0.7.5-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:732e0aa0385b59a1b2545159e781c792cc58ce9c134249233a7c7250a44684c4"}, + {file = "librt-0.7.5-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cdde31759bd8888f3ef0eebda80394a48961328a17c264dce8cc35f4b9cde35d"}, + {file = "librt-0.7.5-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:df3146d52465b3b6397d25d513f428cb421c18df65b7378667bb5f1e3cc45805"}, + {file = "librt-0.7.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:29c8d2fae11d4379ea207ba7fc69d43237e42cf8a9f90ec6e05993687e6d648b"}, + {file = "librt-0.7.5-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:bb41f04046b4f22b1e7ba5ef513402cd2e3477ec610e5f92d38fe2bba383d419"}, + {file = "librt-0.7.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8bb7883c1e94ceb87c2bf81385266f032da09cd040e804cc002f2c9d6b842e2f"}, + {file = "librt-0.7.5-cp311-cp311-win32.whl", hash = "sha256:84d4a6b9efd6124f728558a18e79e7cc5c5d4efc09b2b846c910de7e564f5bad"}, + {file = "librt-0.7.5-cp311-cp311-win_amd64.whl", hash = "sha256:ab4b0d3bee6f6ff7017e18e576ac7e41a06697d8dea4b8f3ab9e0c8e1300c409"}, + {file = "librt-0.7.5-cp311-cp311-win_arm64.whl", hash = "sha256:730be847daad773a3c898943cf67fb9845a3961d06fb79672ceb0a8cd8624cfa"}, + {file = "librt-0.7.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:ba1077c562a046208a2dc6366227b3eeae8f2c2ab4b41eaf4fd2fa28cece4203"}, + {file = "librt-0.7.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:654fdc971c76348a73af5240d8e2529265b9a7ba6321e38dd5bae7b0d4ab3abe"}, + {file = "librt-0.7.5-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:6b7b58913d475911f6f33e8082f19dd9b120c4f4a5c911d07e395d67b81c6982"}, + {file = "librt-0.7.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b8e0fd344bad57026a8f4ccfaf406486c2fc991838050c2fef156170edc3b775"}, + {file = "librt-0.7.5-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:46aa91813c267c3f60db75d56419b42c0c0b9748ec2c568a0e3588e543fb4233"}, + {file = "librt-0.7.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ddc0ab9dbc5f9ceaf2bf7a367bf01f2697660e908f6534800e88f43590b271db"}, + {file = "librt-0.7.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:7a488908a470451338607650f1c064175094aedebf4a4fa37890682e30ce0b57"}, + {file = "librt-0.7.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e47fc52602ffc374e69bf1b76536dc99f7f6dd876bd786c8213eaa3598be030a"}, + {file = "librt-0.7.5-cp312-cp312-win32.whl", hash = "sha256:cda8b025875946ffff5a9a7590bf9acde3eb02cb6200f06a2d3e691ef3d9955b"}, + {file = "librt-0.7.5-cp312-cp312-win_amd64.whl", hash = "sha256:b591c094afd0ffda820e931148c9e48dc31a556dc5b2b9b3cc552fa710d858e4"}, + {file = "librt-0.7.5-cp312-cp312-win_arm64.whl", hash = "sha256:532ddc6a8a6ca341b1cd7f4d999043e4c71a212b26fe9fd2e7f1e8bb4e873544"}, + {file = "librt-0.7.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b1795c4b2789b458fa290059062c2f5a297ddb28c31e704d27e161386469691a"}, + {file = "librt-0.7.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2fcbf2e135c11f721193aa5f42ba112bb1046afafbffd407cbc81d8d735c74d0"}, + {file = "librt-0.7.5-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = 
"sha256:c039bbf79a9a2498404d1ae7e29a6c175e63678d7a54013a97397c40aee026c5"}, + {file = "librt-0.7.5-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3919c9407faeeee35430ae135e3a78acd4ecaaaa73767529e2c15ca1d73ba325"}, + {file = "librt-0.7.5-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:26b46620e1e0e45af510d9848ea0915e7040605dd2ae94ebefb6c962cbb6f7ec"}, + {file = "librt-0.7.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9bbb8facc5375476d392990dd6a71f97e4cb42e2ac66f32e860f6e47299d5e89"}, + {file = "librt-0.7.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e9e9c988b5ffde7be02180f864cbd17c0b0c1231c235748912ab2afa05789c25"}, + {file = "librt-0.7.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:edf6b465306215b19dbe6c3fb63cf374a8f3e1ad77f3b4c16544b83033bbb67b"}, + {file = "librt-0.7.5-cp313-cp313-win32.whl", hash = "sha256:060bde69c3604f694bd8ae21a780fe8be46bb3dbb863642e8dfc75c931ca8eee"}, + {file = "librt-0.7.5-cp313-cp313-win_amd64.whl", hash = "sha256:a82d5a0ee43aeae2116d7292c77cc8038f4841830ade8aa922e098933b468b9e"}, + {file = "librt-0.7.5-cp313-cp313-win_arm64.whl", hash = "sha256:3c98a8d0ac9e2a7cb8ff8c53e5d6e8d82bfb2839abf144fdeaaa832f2a12aa45"}, + {file = "librt-0.7.5-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:9937574e6d842f359b8585903d04f5b4ab62277a091a93e02058158074dc52f2"}, + {file = "librt-0.7.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:5cd3afd71e9bc146203b6c8141921e738364158d4aa7cdb9a874e2505163770f"}, + {file = "librt-0.7.5-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:9cffa3ef0af29687455161cb446eff059bf27607f95163d6a37e27bcb37180f6"}, + {file = "librt-0.7.5-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:82f3f088482e2229387eadf8215c03f7726d56f69cce8c0c40f0795aebc9b361"}, + {file = 
"librt-0.7.5-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d7aa33153a5bb0bac783d2c57885889b1162823384e8313d47800a0e10d0070e"}, + {file = "librt-0.7.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:265729b551a2dd329cc47b323a182fb7961af42abf21e913c9dd7d3331b2f3c2"}, + {file = "librt-0.7.5-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:168e04663e126416ba712114050f413ac306759a1791d87b7c11d4428ba75760"}, + {file = "librt-0.7.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:553dc58987d1d853adda8aeadf4db8e29749f0b11877afcc429a9ad892818ae2"}, + {file = "librt-0.7.5-cp314-cp314-win32.whl", hash = "sha256:263f4fae9eba277513357c871275b18d14de93fd49bf5e43dc60a97b81ad5eb8"}, + {file = "librt-0.7.5-cp314-cp314-win_amd64.whl", hash = "sha256:85f485b7471571e99fab4f44eeb327dc0e1f814ada575f3fa85e698417d8a54e"}, + {file = "librt-0.7.5-cp314-cp314-win_arm64.whl", hash = "sha256:49c596cd18e90e58b7caa4d7ca7606049c1802125fcff96b8af73fa5c3870e4d"}, + {file = "librt-0.7.5-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:54d2aef0b0f5056f130981ad45081b278602ff3657fe16c88529f5058038e802"}, + {file = "librt-0.7.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0b4791202296ad51ac09a3ff58eb49d9da8e3a4009167a6d76ac418a974e5fd4"}, + {file = "librt-0.7.5-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:6e860909fea75baef941ee6436e0453612505883b9d0d87924d4fda27865b9a2"}, + {file = "librt-0.7.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f02c4337bf271c4f06637f5ff254fad2238c0b8e32a3a480ebb2fc5e26f754a5"}, + {file = "librt-0.7.5-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f7f51ffe59f4556243d3cc82d827bde74765f594fa3ceb80ec4de0c13ccd3416"}, + {file = "librt-0.7.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = 
"sha256:0b7f080ba30601dfa3e3deed3160352273e1b9bc92e652f51103c3e9298f7899"}, + {file = "librt-0.7.5-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:fb565b4219abc8ea2402e61c7ba648a62903831059ed3564fa1245cc245d58d7"}, + {file = "librt-0.7.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8a3cfb15961e7333ea6ef033dc574af75153b5c230d5ad25fbcd55198f21e0cf"}, + {file = "librt-0.7.5-cp314-cp314t-win32.whl", hash = "sha256:118716de5ad6726332db1801bc90fa6d94194cd2e07c1a7822cebf12c496714d"}, + {file = "librt-0.7.5-cp314-cp314t-win_amd64.whl", hash = "sha256:3dd58f7ce20360c6ce0c04f7bd9081c7f9c19fc6129a3c705d0c5a35439f201d"}, + {file = "librt-0.7.5-cp314-cp314t-win_arm64.whl", hash = "sha256:08153ea537609d11f774d2bfe84af39d50d5c9ca3a4d061d946e0c9d8bce04a1"}, + {file = "librt-0.7.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:df2e210400b28e50994477ebf82f055698c79797b6ee47a1669d383ca33263e1"}, + {file = "librt-0.7.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d2cc7d187e8c6e9b7bdbefa9697ce897a704ea7a7ce844f2b4e0e2aa07ae51d3"}, + {file = "librt-0.7.5-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:39183abee670bc37b85f11e86c44a9cad1ed6efa48b580083e89ecee13dd9717"}, + {file = "librt-0.7.5-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:191cbd42660446d67cf7a95ac7bfa60f49b8b3b0417c64f216284a1d86fc9335"}, + {file = "librt-0.7.5-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ea1b60b86595a5dc1f57b44a801a1c4d8209c0a69518391d349973a4491408e6"}, + {file = "librt-0.7.5-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:af69d9e159575e877c7546d1ee817b4ae089aa221dd1117e20c24ad8dc8659c7"}, + {file = "librt-0.7.5-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:0e2bf8f91093fac43e3eaebacf777f12fd539dce9ec5af3efc6d8424e96ccd49"}, + {file = "librt-0.7.5-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:8dcae24de1bc9da93aa689cb6313c70e776d7cea2fcf26b9b6160fedfe6bd9af"}, + {file = "librt-0.7.5-cp39-cp39-win32.whl", hash = "sha256:cdb001a1a0e4f41e613bca2c0fc147fc8a7396f53fc94201cbfd8ec7cd69ca4b"}, + {file = "librt-0.7.5-cp39-cp39-win_amd64.whl", hash = "sha256:a9eacbf983319b26b5f340a2e0cd47ac1ee4725a7f3a72fd0f15063c934b69d6"}, + {file = "librt-0.7.5.tar.gz", hash = "sha256:de4221a1181fa9c8c4b5f35506ed6f298948f44003d84d2a8b9885d7e01e6cfa"}, +] + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "multidict" +version = "6.7.0" +description = "multidict implementation" +optional = true +python-versions = ">=3.9" +files = [ + {file = "multidict-6.7.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9f474ad5acda359c8758c8accc22032c6abe6dc87a8be2440d097785e27a9349"}, + {file = "multidict-6.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4b7a9db5a870f780220e931d0002bbfd88fb53aceb6293251e2c839415c1b20e"}, + {file = "multidict-6.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:03ca744319864e92721195fa28c7a3b2bc7b686246b35e4078c1e4d0eb5466d3"}, + {file = "multidict-6.7.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f0e77e3c0008bc9316e662624535b88d360c3a5d3f81e15cf12c139a75250046"}, + {file = "multidict-6.7.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:08325c9e5367aa379a3496aa9a022fe8837ff22e00b94db256d3a1378c76ab32"}, + {file = "multidict-6.7.0-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:e2862408c99f84aa571ab462d25236ef9cb12a602ea959ba9c9009a54902fc73"}, + {file = "multidict-6.7.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4d72a9a2d885f5c208b0cb91ff2ed43636bb7e345ec839ff64708e04f69a13cc"}, + {file = "multidict-6.7.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:478cc36476687bac1514d651cbbaa94b86b0732fb6855c60c673794c7dd2da62"}, + {file = "multidict-6.7.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6843b28b0364dc605f21481c90fadb5f60d9123b442eb8a726bb74feef588a84"}, + {file = "multidict-6.7.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:23bfeee5316266e5ee2d625df2d2c602b829435fc3a235c2ba2131495706e4a0"}, + {file = "multidict-6.7.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:680878b9f3d45c31e1f730eef731f9b0bc1da456155688c6745ee84eb818e90e"}, + {file = "multidict-6.7.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:eb866162ef2f45063acc7a53a88ef6fe8bf121d45c30ea3c9cd87ce7e191a8d4"}, + {file = "multidict-6.7.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:df0e3bf7993bdbeca5ac25aa859cf40d39019e015c9c91809ba7093967f7a648"}, + {file = "multidict-6.7.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:661709cdcd919a2ece2234f9bae7174e5220c80b034585d7d8a755632d3e2111"}, + {file = "multidict-6.7.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:096f52730c3fb8ed419db2d44391932b63891b2c5ed14850a7e215c0ba9ade36"}, + {file = "multidict-6.7.0-cp310-cp310-win32.whl", hash = "sha256:afa8a2978ec65d2336305550535c9c4ff50ee527914328c8677b3973ade52b85"}, + {file = "multidict-6.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:b15b3afff74f707b9275d5ba6a91ae8f6429c3ffb29bbfd216b0b375a56f13d7"}, + {file = "multidict-6.7.0-cp310-cp310-win_arm64.whl", hash = "sha256:4b73189894398d59131a66ff157837b1fafea9974be486d036bb3d32331fdbf0"}, + {file = 
"multidict-6.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4d409aa42a94c0b3fa617708ef5276dfe81012ba6753a0370fcc9d0195d0a1fc"}, + {file = "multidict-6.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14c9e076eede3b54c636f8ce1c9c252b5f057c62131211f0ceeec273810c9721"}, + {file = "multidict-6.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c09703000a9d0fa3c3404b27041e574cc7f4df4c6563873246d0e11812a94b6"}, + {file = "multidict-6.7.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a265acbb7bb33a3a2d626afbe756371dce0279e7b17f4f4eda406459c2b5ff1c"}, + {file = "multidict-6.7.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:51cb455de290ae462593e5b1cb1118c5c22ea7f0d3620d9940bf695cea5a4bd7"}, + {file = "multidict-6.7.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:db99677b4457c7a5c5a949353e125ba72d62b35f74e26da141530fbb012218a7"}, + {file = "multidict-6.7.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f470f68adc395e0183b92a2f4689264d1ea4b40504a24d9882c27375e6662bb9"}, + {file = "multidict-6.7.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0db4956f82723cc1c270de9c6e799b4c341d327762ec78ef82bb962f79cc07d8"}, + {file = "multidict-6.7.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3e56d780c238f9e1ae66a22d2adf8d16f485381878250db8d496623cd38b22bd"}, + {file = "multidict-6.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9d14baca2ee12c1a64740d4531356ba50b82543017f3ad6de0deb943c5979abb"}, + {file = "multidict-6.7.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:295a92a76188917c7f99cda95858c822f9e4aae5824246bba9b6b44004ddd0a6"}, + {file = "multidict-6.7.0-cp311-cp311-musllinux_1_2_i686.whl", hash = 
"sha256:39f1719f57adbb767ef592a50ae5ebb794220d1188f9ca93de471336401c34d2"}, + {file = "multidict-6.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:0a13fb8e748dfc94749f622de065dd5c1def7e0d2216dba72b1d8069a389c6ff"}, + {file = "multidict-6.7.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e3aa16de190d29a0ea1b48253c57d99a68492c8dd8948638073ab9e74dc9410b"}, + {file = "multidict-6.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a048ce45dcdaaf1defb76b2e684f997fb5abf74437b6cb7b22ddad934a964e34"}, + {file = "multidict-6.7.0-cp311-cp311-win32.whl", hash = "sha256:a90af66facec4cebe4181b9e62a68be65e45ac9b52b67de9eec118701856e7ff"}, + {file = "multidict-6.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:95b5ffa4349df2887518bb839409bcf22caa72d82beec453216802f475b23c81"}, + {file = "multidict-6.7.0-cp311-cp311-win_arm64.whl", hash = "sha256:329aa225b085b6f004a4955271a7ba9f1087e39dcb7e65f6284a988264a63912"}, + {file = "multidict-6.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8a3862568a36d26e650a19bb5cbbba14b71789032aebc0423f8cc5f150730184"}, + {file = "multidict-6.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:960c60b5849b9b4f9dcc9bea6e3626143c252c74113df2c1540aebce70209b45"}, + {file = "multidict-6.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2049be98fb57a31b4ccf870bf377af2504d4ae35646a19037ec271e4c07998aa"}, + {file = "multidict-6.7.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0934f3843a1860dd465d38895c17fce1f1cb37295149ab05cd1b9a03afacb2a7"}, + {file = "multidict-6.7.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b3e34f3a1b8131ba06f1a73adab24f30934d148afcd5f5de9a73565a4404384e"}, + {file = "multidict-6.7.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:efbb54e98446892590dc2458c19c10344ee9a883a79b5cec4bc34d6656e8d546"}, + {file = 
"multidict-6.7.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a35c5fc61d4f51eb045061e7967cfe3123d622cd500e8868e7c0c592a09fedc4"}, + {file = "multidict-6.7.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29fe6740ebccba4175af1b9b87bf553e9c15cd5868ee967e010efcf94e4fd0f1"}, + {file = "multidict-6.7.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:123e2a72e20537add2f33a79e605f6191fba2afda4cbb876e35c1a7074298a7d"}, + {file = "multidict-6.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b284e319754366c1aee2267a2036248b24eeb17ecd5dc16022095e747f2f4304"}, + {file = "multidict-6.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:803d685de7be4303b5a657b76e2f6d1240e7e0a8aa2968ad5811fa2285553a12"}, + {file = "multidict-6.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c04a328260dfd5db8c39538f999f02779012268f54614902d0afc775d44e0a62"}, + {file = "multidict-6.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8a19cdb57cd3df4cd865849d93ee14920fb97224300c88501f16ecfa2604b4e0"}, + {file = "multidict-6.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9b2fd74c52accced7e75de26023b7dccee62511a600e62311b918ec5c168fc2a"}, + {file = "multidict-6.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3e8bfdd0e487acf992407a140d2589fe598238eaeffa3da8448d63a63cd363f8"}, + {file = "multidict-6.7.0-cp312-cp312-win32.whl", hash = "sha256:dd32a49400a2c3d52088e120ee00c1e3576cbff7e10b98467962c74fdb762ed4"}, + {file = "multidict-6.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:92abb658ef2d7ef22ac9f8bb88e8b6c3e571671534e029359b6d9e845923eb1b"}, + {file = "multidict-6.7.0-cp312-cp312-win_arm64.whl", hash = "sha256:490dab541a6a642ce1a9d61a4781656b346a55c13038f0b1244653828e3a83ec"}, + {file = "multidict-6.7.0-cp313-cp313-macosx_10_13_universal2.whl", hash = 
"sha256:bee7c0588aa0076ce77c0ea5d19a68d76ad81fcd9fe8501003b9a24f9d4000f6"}, + {file = "multidict-6.7.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7ef6b61cad77091056ce0e7ce69814ef72afacb150b7ac6a3e9470def2198159"}, + {file = "multidict-6.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9c0359b1ec12b1d6849c59f9d319610b7f20ef990a6d454ab151aa0e3b9f78ca"}, + {file = "multidict-6.7.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cd240939f71c64bd658f186330603aac1a9a81bf6273f523fca63673cb7378a8"}, + {file = "multidict-6.7.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a60a4d75718a5efa473ebd5ab685786ba0c67b8381f781d1be14da49f1a2dc60"}, + {file = "multidict-6.7.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:53a42d364f323275126aff81fb67c5ca1b7a04fda0546245730a55c8c5f24bc4"}, + {file = "multidict-6.7.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3b29b980d0ddbecb736735ee5bef69bb2ddca56eff603c86f3f29a1128299b4f"}, + {file = "multidict-6.7.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f8a93b1c0ed2d04b97a5e9336fd2d33371b9a6e29ab7dd6503d63407c20ffbaf"}, + {file = "multidict-6.7.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ff96e8815eecacc6645da76c413eb3b3d34cfca256c70b16b286a687d013c32"}, + {file = "multidict-6.7.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7516c579652f6a6be0e266aec0acd0db80829ca305c3d771ed898538804c2036"}, + {file = "multidict-6.7.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:040f393368e63fb0f3330e70c26bfd336656bed925e5cbe17c9da839a6ab13ec"}, + {file = "multidict-6.7.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b3bc26a951007b1057a1c543af845f1c7e3e71cc240ed1ace7bf4484aa99196e"}, + {file = 
"multidict-6.7.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:7b022717c748dd1992a83e219587aabe45980d88969f01b316e78683e6285f64"}, + {file = "multidict-6.7.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:9600082733859f00d79dee64effc7aef1beb26adb297416a4ad2116fd61374bd"}, + {file = "multidict-6.7.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:94218fcec4d72bc61df51c198d098ce2b378e0ccbac41ddbed5ef44092913288"}, + {file = "multidict-6.7.0-cp313-cp313-win32.whl", hash = "sha256:a37bd74c3fa9d00be2d7b8eca074dc56bd8077ddd2917a839bd989612671ed17"}, + {file = "multidict-6.7.0-cp313-cp313-win_amd64.whl", hash = "sha256:30d193c6cc6d559db42b6bcec8a5d395d34d60c9877a0b71ecd7c204fcf15390"}, + {file = "multidict-6.7.0-cp313-cp313-win_arm64.whl", hash = "sha256:ea3334cabe4d41b7ccd01e4d349828678794edbc2d3ae97fc162a3312095092e"}, + {file = "multidict-6.7.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:ad9ce259f50abd98a1ca0aa6e490b58c316a0fce0617f609723e40804add2c00"}, + {file = "multidict-6.7.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07f5594ac6d084cbb5de2df218d78baf55ef150b91f0ff8a21cc7a2e3a5a58eb"}, + {file = "multidict-6.7.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0591b48acf279821a579282444814a2d8d0af624ae0bc600aa4d1b920b6e924b"}, + {file = "multidict-6.7.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:749a72584761531d2b9467cfbdfd29487ee21124c304c4b6cb760d8777b27f9c"}, + {file = "multidict-6.7.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b4c3d199f953acd5b446bf7c0de1fe25d94e09e79086f8dc2f48a11a129cdf1"}, + {file = "multidict-6.7.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:9fb0211dfc3b51efea2f349ec92c114d7754dd62c01f81c3e32b765b70c45c9b"}, + {file = "multidict-6.7.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:a027ec240fe73a8d6281872690b988eed307cd7d91b23998ff35ff577ca688b5"}, + {file = "multidict-6.7.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1d964afecdf3a8288789df2f5751dc0a8261138c3768d9af117ed384e538fad"}, + {file = "multidict-6.7.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:caf53b15b1b7df9fbd0709aa01409000a2b4dd03a5f6f5cc548183c7c8f8b63c"}, + {file = "multidict-6.7.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:654030da3197d927f05a536a66186070e98765aa5142794c9904555d3a9d8fb5"}, + {file = "multidict-6.7.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:2090d3718829d1e484706a2f525e50c892237b2bf9b17a79b059cb98cddc2f10"}, + {file = "multidict-6.7.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2d2cfeec3f6f45651b3d408c4acec0ebf3daa9bc8a112a084206f5db5d05b754"}, + {file = "multidict-6.7.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:4ef089f985b8c194d341eb2c24ae6e7408c9a0e2e5658699c92f497437d88c3c"}, + {file = "multidict-6.7.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e93a0617cd16998784bf4414c7e40f17a35d2350e5c6f0bd900d3a8e02bd3762"}, + {file = "multidict-6.7.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f0feece2ef8ebc42ed9e2e8c78fc4aa3cf455733b507c09ef7406364c94376c6"}, + {file = "multidict-6.7.0-cp313-cp313t-win32.whl", hash = "sha256:19a1d55338ec1be74ef62440ca9e04a2f001a04d0cc49a4983dc320ff0f3212d"}, + {file = "multidict-6.7.0-cp313-cp313t-win_amd64.whl", hash = "sha256:3da4fb467498df97e986af166b12d01f05d2e04f978a9c1c680ea1988e0bc4b6"}, + {file = "multidict-6.7.0-cp313-cp313t-win_arm64.whl", hash = "sha256:b4121773c49a0776461f4a904cdf6264c88e42218aaa8407e803ca8025872792"}, + {file = "multidict-6.7.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3bab1e4aff7adaa34410f93b1f8e57c4b36b9af0426a76003f441ee1d3c7e842"}, + {file = "multidict-6.7.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = 
"sha256:b8512bac933afc3e45fb2b18da8e59b78d4f408399a960339598374d4ae3b56b"}, + {file = "multidict-6.7.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:79dcf9e477bc65414ebfea98ffd013cb39552b5ecd62908752e0e413d6d06e38"}, + {file = "multidict-6.7.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:31bae522710064b5cbeddaf2e9f32b1abab70ac6ac91d42572502299e9953128"}, + {file = "multidict-6.7.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4a0df7ff02397bb63e2fd22af2c87dfa39e8c7f12947bc524dbdc528282c7e34"}, + {file = "multidict-6.7.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7a0222514e8e4c514660e182d5156a415c13ef0aabbd71682fc714e327b95e99"}, + {file = "multidict-6.7.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2397ab4daaf2698eb51a76721e98db21ce4f52339e535725de03ea962b5a3202"}, + {file = "multidict-6.7.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8891681594162635948a636c9fe0ff21746aeb3dd5463f6e25d9bea3a8a39ca1"}, + {file = "multidict-6.7.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18706cc31dbf402a7945916dd5cddf160251b6dab8a2c5f3d6d5a55949f676b3"}, + {file = "multidict-6.7.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f844a1bbf1d207dd311a56f383f7eda2d0e134921d45751842d8235e7778965d"}, + {file = "multidict-6.7.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:d4393e3581e84e5645506923816b9cc81f5609a778c7e7534054091acc64d1c6"}, + {file = "multidict-6.7.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:fbd18dc82d7bf274b37aa48d664534330af744e03bccf696d6f4c6042e7d19e7"}, + {file = "multidict-6.7.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:b6234e14f9314731ec45c42fc4554b88133ad53a09092cc48a88e771c125dadb"}, + {file = 
"multidict-6.7.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:08d4379f9744d8f78d98c8673c06e202ffa88296f009c71bbafe8a6bf847d01f"}, + {file = "multidict-6.7.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:9fe04da3f79387f450fd0061d4dd2e45a72749d31bf634aecc9e27f24fdc4b3f"}, + {file = "multidict-6.7.0-cp314-cp314-win32.whl", hash = "sha256:fbafe31d191dfa7c4c51f7a6149c9fb7e914dcf9ffead27dcfd9f1ae382b3885"}, + {file = "multidict-6.7.0-cp314-cp314-win_amd64.whl", hash = "sha256:2f67396ec0310764b9222a1728ced1ab638f61aadc6226f17a71dd9324f9a99c"}, + {file = "multidict-6.7.0-cp314-cp314-win_arm64.whl", hash = "sha256:ba672b26069957ee369cfa7fc180dde1fc6f176eaf1e6beaf61fbebbd3d9c000"}, + {file = "multidict-6.7.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:c1dcc7524066fa918c6a27d61444d4ee7900ec635779058571f70d042d86ed63"}, + {file = "multidict-6.7.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:27e0b36c2d388dc7b6ced3406671b401e84ad7eb0656b8f3a2f46ed0ce483718"}, + {file = "multidict-6.7.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2a7baa46a22e77f0988e3b23d4ede5513ebec1929e34ee9495be535662c0dfe2"}, + {file = "multidict-6.7.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7bf77f54997a9166a2f5675d1201520586439424c2511723a7312bdb4bcc034e"}, + {file = "multidict-6.7.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e011555abada53f1578d63389610ac8a5400fc70ce71156b0aa30d326f1a5064"}, + {file = "multidict-6.7.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:28b37063541b897fd6a318007373930a75ca6d6ac7c940dbe14731ffdd8d498e"}, + {file = "multidict-6.7.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:05047ada7a2fde2631a0ed706f1fd68b169a681dfe5e4cf0f8e4cb6618bbc2cd"}, + {file = 
"multidict-6.7.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:716133f7d1d946a4e1b91b1756b23c088881e70ff180c24e864c26192ad7534a"}, + {file = "multidict-6.7.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d1bed1b467ef657f2a0ae62844a607909ef1c6889562de5e1d505f74457d0b96"}, + {file = "multidict-6.7.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:ca43bdfa5d37bd6aee89d85e1d0831fb86e25541be7e9d376ead1b28974f8e5e"}, + {file = "multidict-6.7.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:44b546bd3eb645fd26fb949e43c02a25a2e632e2ca21a35e2e132c8105dc8599"}, + {file = "multidict-6.7.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:a6ef16328011d3f468e7ebc326f24c1445f001ca1dec335b2f8e66bed3006394"}, + {file = "multidict-6.7.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:5aa873cbc8e593d361ae65c68f85faadd755c3295ea2c12040ee146802f23b38"}, + {file = "multidict-6.7.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:3d7b6ccce016e29df4b7ca819659f516f0bc7a4b3efa3bb2012ba06431b044f9"}, + {file = "multidict-6.7.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:171b73bd4ee683d307599b66793ac80981b06f069b62eea1c9e29c9241aa66b0"}, + {file = "multidict-6.7.0-cp314-cp314t-win32.whl", hash = "sha256:b2d7f80c4e1fd010b07cb26820aae86b7e73b681ee4889684fb8d2d4537aab13"}, + {file = "multidict-6.7.0-cp314-cp314t-win_amd64.whl", hash = "sha256:09929cab6fcb68122776d575e03c6cc64ee0b8fca48d17e135474b042ce515cd"}, + {file = "multidict-6.7.0-cp314-cp314t-win_arm64.whl", hash = "sha256:cc41db090ed742f32bd2d2c721861725e6109681eddf835d0a82bd3a5c382827"}, + {file = "multidict-6.7.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:363eb68a0a59bd2303216d2346e6c441ba10d36d1f9969fcb6f1ba700de7bb5c"}, + {file = "multidict-6.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d874eb056410ca05fed180b6642e680373688efafc7f077b2a2f61811e873a40"}, + {file = 
"multidict-6.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8b55d5497b51afdfde55925e04a022f1de14d4f4f25cdfd4f5d9b0aa96166851"}, + {file = "multidict-6.7.0-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f8e5c0031b90ca9ce555e2e8fd5c3b02a25f14989cbc310701823832c99eb687"}, + {file = "multidict-6.7.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9cf41880c991716f3c7cec48e2f19ae4045fc9db5fc9cff27347ada24d710bb5"}, + {file = "multidict-6.7.0-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8cfc12a8630a29d601f48d47787bd7eb730e475e83edb5d6c5084317463373eb"}, + {file = "multidict-6.7.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3996b50c3237c4aec17459217c1e7bbdead9a22a0fcd3c365564fbd16439dde6"}, + {file = "multidict-6.7.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7f5170993a0dd3ab871c74f45c0a21a4e2c37a2f2b01b5f722a2ad9c6650469e"}, + {file = "multidict-6.7.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ec81878ddf0e98817def1e77d4f50dae5ef5b0e4fe796fae3bd674304172416e"}, + {file = "multidict-6.7.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9281bf5b34f59afbc6b1e477a372e9526b66ca446f4bf62592839c195a718b32"}, + {file = "multidict-6.7.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:68af405971779d8b37198726f2b6fe3955db846fee42db7a4286fc542203934c"}, + {file = "multidict-6.7.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3ba3ef510467abb0667421a286dc906e30eb08569365f5cdb131d7aff7c2dd84"}, + {file = "multidict-6.7.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:b61189b29081a20c7e4e0b49b44d5d44bb0dc92be3c6d06a11cc043f81bf9329"}, + {file = "multidict-6.7.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = 
"sha256:fb287618b9c7aa3bf8d825f02d9201b2f13078a5ed3b293c8f4d953917d84d5e"}, + {file = "multidict-6.7.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:521f33e377ff64b96c4c556b81c55d0cfffb96a11c194fd0c3f1e56f3d8dd5a4"}, + {file = "multidict-6.7.0-cp39-cp39-win32.whl", hash = "sha256:ce8fdc2dca699f8dbf055a61d73eaa10482569ad20ee3c36ef9641f69afa8c91"}, + {file = "multidict-6.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:7e73299c99939f089dd9b2120a04a516b95cdf8c1cd2b18c53ebf0de80b1f18f"}, + {file = "multidict-6.7.0-cp39-cp39-win_arm64.whl", hash = "sha256:6bdce131e14b04fd34a809b6380dbfd826065c3e2fe8a50dbae659fa0c390546"}, + {file = "multidict-6.7.0-py3-none-any.whl", hash = "sha256:394fc5c42a333c9ffc3e421a4c85e08580d990e08b99f6bf35b4132114c5dcb3"}, + {file = "multidict-6.7.0.tar.gz", hash = "sha256:c6e99d9a65ca282e578dfea819cfa9c0a62b2499d8677392e09feaf305e9e6f5"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.11\""} + +[[package]] +name = "mypy" +version = "1.19.1" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "mypy-1.19.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5f05aa3d375b385734388e844bc01733bd33c644ab48e9684faa54e5389775ec"}, + {file = "mypy-1.19.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:022ea7279374af1a5d78dfcab853fe6a536eebfda4b59deab53cd21f6cd9f00b"}, + {file = "mypy-1.19.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee4c11e460685c3e0c64a4c5de82ae143622410950d6be863303a1c4ba0e36d6"}, + {file = "mypy-1.19.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de759aafbae8763283b2ee5869c7255391fbc4de3ff171f8f030b5ec48381b74"}, + {file = "mypy-1.19.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ab43590f9cd5108f41aacf9fca31841142c786827a74ab7cc8a2eacb634e09a1"}, + {file = 
"mypy-1.19.1-cp310-cp310-win_amd64.whl", hash = "sha256:2899753e2f61e571b3971747e302d5f420c3fd09650e1951e99f823bc3089dac"}, + {file = "mypy-1.19.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d8dfc6ab58ca7dda47d9237349157500468e404b17213d44fc1cb77bce532288"}, + {file = "mypy-1.19.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e3f276d8493c3c97930e354b2595a44a21348b320d859fb4a2b9f66da9ed27ab"}, + {file = "mypy-1.19.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2abb24cf3f17864770d18d673c85235ba52456b36a06b6afc1e07c1fdcd3d0e6"}, + {file = "mypy-1.19.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a009ffa5a621762d0c926a078c2d639104becab69e79538a494bcccb62cc0331"}, + {file = "mypy-1.19.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f7cee03c9a2e2ee26ec07479f38ea9c884e301d42c6d43a19d20fb014e3ba925"}, + {file = "mypy-1.19.1-cp311-cp311-win_amd64.whl", hash = "sha256:4b84a7a18f41e167f7995200a1d07a4a6810e89d29859df936f1c3923d263042"}, + {file = "mypy-1.19.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a8174a03289288c1f6c46d55cef02379b478bfbc8e358e02047487cad44c6ca1"}, + {file = "mypy-1.19.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ffcebe56eb09ff0c0885e750036a095e23793ba6c2e894e7e63f6d89ad51f22e"}, + {file = "mypy-1.19.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b64d987153888790bcdb03a6473d321820597ab8dd9243b27a92153c4fa50fd2"}, + {file = "mypy-1.19.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c35d298c2c4bba75feb2195655dfea8124d855dfd7343bf8b8c055421eaf0cf8"}, + {file = "mypy-1.19.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:34c81968774648ab5ac09c29a375fdede03ba253f8f8287847bd480782f73a6a"}, + {file = "mypy-1.19.1-cp312-cp312-win_amd64.whl", hash = 
"sha256:b10e7c2cd7870ba4ad9b2d8a6102eb5ffc1f16ca35e3de6bfa390c1113029d13"}, + {file = "mypy-1.19.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e3157c7594ff2ef1634ee058aafc56a82db665c9438fd41b390f3bde1ab12250"}, + {file = "mypy-1.19.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bdb12f69bcc02700c2b47e070238f42cb87f18c0bc1fc4cdb4fb2bc5fd7a3b8b"}, + {file = "mypy-1.19.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f859fb09d9583a985be9a493d5cfc5515b56b08f7447759a0c5deaf68d80506e"}, + {file = "mypy-1.19.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9a6538e0415310aad77cb94004ca6482330fece18036b5f360b62c45814c4ef"}, + {file = "mypy-1.19.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:da4869fc5e7f62a88f3fe0b5c919d1d9f7ea3cef92d3689de2823fd27e40aa75"}, + {file = "mypy-1.19.1-cp313-cp313-win_amd64.whl", hash = "sha256:016f2246209095e8eda7538944daa1d60e1e8134d98983b9fc1e92c1fc0cb8dd"}, + {file = "mypy-1.19.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:06e6170bd5836770e8104c8fdd58e5e725cfeb309f0a6c681a811f557e97eac1"}, + {file = "mypy-1.19.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:804bd67b8054a85447c8954215a906d6eff9cabeabe493fb6334b24f4bfff718"}, + {file = "mypy-1.19.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:21761006a7f497cb0d4de3d8ef4ca70532256688b0523eee02baf9eec895e27b"}, + {file = "mypy-1.19.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:28902ee51f12e0f19e1e16fbe2f8f06b6637f482c459dd393efddd0ec7f82045"}, + {file = "mypy-1.19.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:481daf36a4c443332e2ae9c137dfee878fcea781a2e3f895d54bd3002a900957"}, + {file = "mypy-1.19.1-cp314-cp314-win_amd64.whl", hash = "sha256:8bb5c6f6d043655e055be9b542aa5f3bdd30e4f3589163e85f93f3640060509f"}, + {file = 
"mypy-1.19.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7bcfc336a03a1aaa26dfce9fff3e287a3ba99872a157561cbfcebe67c13308e3"}, + {file = "mypy-1.19.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b7951a701c07ea584c4fe327834b92a30825514c868b1f69c30445093fdd9d5a"}, + {file = "mypy-1.19.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b13cfdd6c87fc3efb69ea4ec18ef79c74c3f98b4e5498ca9b85ab3b2c2329a67"}, + {file = "mypy-1.19.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f28f99c824ecebcdaa2e55d82953e38ff60ee5ec938476796636b86afa3956e"}, + {file = "mypy-1.19.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c608937067d2fc5a4dd1a5ce92fd9e1398691b8c5d012d66e1ddd430e9244376"}, + {file = "mypy-1.19.1-cp39-cp39-win_amd64.whl", hash = "sha256:409088884802d511ee52ca067707b90c883426bd95514e8cfda8281dc2effe24"}, + {file = "mypy-1.19.1-py3-none-any.whl", hash = "sha256:f1235f5ea01b7db5468d53ece6aaddf1ad0b88d9e7462b86ef96fe04995d7247"}, + {file = "mypy-1.19.1.tar.gz", hash = "sha256:19d88bb05303fe63f71dd2c6270daca27cb9401c4ca8255fe50d1d920e0eb9ba"}, +] + +[package.dependencies] +librt = {version = ">=0.6.2", markers = "platform_python_implementation != \"PyPy\""} +mypy_extensions = ">=1.0.0" +pathspec = ">=0.9.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing_extensions = ">=4.6.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +faster-cache = ["orjson"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +description = "Type system extensions for programs checked with the mypy type checker." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, + {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, +] + +[[package]] +name = "packaging" +version = "25.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, + {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, + {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["coverage", "pytest", "pytest-benchmark"] + +[[package]] +name = "propcache" +version = "0.4.1" +description = "Accelerated property cache" +optional = true +python-versions = ">=3.9" +files = [ + {file = "propcache-0.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c2d1fa3201efaf55d730400d945b5b3ab6e672e100ba0f9a409d950ab25d7db"}, + 
{file = "propcache-0.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1eb2994229cc8ce7fe9b3db88f5465f5fd8651672840b2e426b88cdb1a30aac8"}, + {file = "propcache-0.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:66c1f011f45a3b33d7bcb22daed4b29c0c9e2224758b6be00686731e1b46f925"}, + {file = "propcache-0.4.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9a52009f2adffe195d0b605c25ec929d26b36ef986ba85244891dee3b294df21"}, + {file = "propcache-0.4.1-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5d4e2366a9c7b837555cf02fb9be2e3167d333aff716332ef1b7c3a142ec40c5"}, + {file = "propcache-0.4.1-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:9d2b6caef873b4f09e26ea7e33d65f42b944837563a47a94719cc3544319a0db"}, + {file = "propcache-0.4.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b16ec437a8c8a965ecf95739448dd938b5c7f56e67ea009f4300d8df05f32b7"}, + {file = "propcache-0.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:296f4c8ed03ca7476813fe666c9ea97869a8d7aec972618671b33a38a5182ef4"}, + {file = "propcache-0.4.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:1f0978529a418ebd1f49dad413a2b68af33f85d5c5ca5c6ca2a3bed375a7ac60"}, + {file = "propcache-0.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fd138803047fb4c062b1c1dd95462f5209456bfab55c734458f15d11da288f8f"}, + {file = "propcache-0.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8c9b3cbe4584636d72ff556d9036e0c9317fa27b3ac1f0f558e7e84d1c9c5900"}, + {file = "propcache-0.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f93243fdc5657247533273ac4f86ae106cc6445a0efacb9a1bfe982fcfefd90c"}, + {file = "propcache-0.4.1-cp310-cp310-win32.whl", hash = "sha256:a0ee98db9c5f80785b266eb805016e36058ac72c51a064040f2bc43b61101cdb"}, + {file = 
"propcache-0.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:1cdb7988c4e5ac7f6d175a28a9aa0c94cb6f2ebe52756a3c0cda98d2809a9e37"}, + {file = "propcache-0.4.1-cp310-cp310-win_arm64.whl", hash = "sha256:d82ad62b19645419fe79dd63b3f9253e15b30e955c0170e5cebc350c1844e581"}, + {file = "propcache-0.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:60a8fda9644b7dfd5dece8c61d8a85e271cb958075bfc4e01083c148b61a7caf"}, + {file = "propcache-0.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c30b53e7e6bda1d547cabb47c825f3843a0a1a42b0496087bb58d8fedf9f41b5"}, + {file = "propcache-0.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6918ecbd897443087a3b7cd978d56546a812517dcaaca51b49526720571fa93e"}, + {file = "propcache-0.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3d902a36df4e5989763425a8ab9e98cd8ad5c52c823b34ee7ef307fd50582566"}, + {file = "propcache-0.4.1-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a9695397f85973bb40427dedddf70d8dc4a44b22f1650dd4af9eedf443d45165"}, + {file = "propcache-0.4.1-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2bb07ffd7eaad486576430c89f9b215f9e4be68c4866a96e97db9e97fead85dc"}, + {file = "propcache-0.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd6f30fdcf9ae2a70abd34da54f18da086160e4d7d9251f81f3da0ff84fc5a48"}, + {file = "propcache-0.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fc38cba02d1acba4e2869eef1a57a43dfbd3d49a59bf90dda7444ec2be6a5570"}, + {file = "propcache-0.4.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:67fad6162281e80e882fb3ec355398cf72864a54069d060321f6cd0ade95fe85"}, + {file = "propcache-0.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f10207adf04d08bec185bae14d9606a1444715bc99180f9331c9c02093e1959e"}, + {file = "propcache-0.4.1-cp311-cp311-musllinux_1_2_s390x.whl", 
hash = "sha256:e9b0d8d0845bbc4cfcdcbcdbf5086886bc8157aa963c31c777ceff7846c77757"}, + {file = "propcache-0.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:981333cb2f4c1896a12f4ab92a9cc8f09ea664e9b7dbdc4eff74627af3a11c0f"}, + {file = "propcache-0.4.1-cp311-cp311-win32.whl", hash = "sha256:f1d2f90aeec838a52f1c1a32fe9a619fefd5e411721a9117fbf82aea638fe8a1"}, + {file = "propcache-0.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:364426a62660f3f699949ac8c621aad6977be7126c5807ce48c0aeb8e7333ea6"}, + {file = "propcache-0.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:e53f3a38d3510c11953f3e6a33f205c6d1b001129f972805ca9b42fc308bc239"}, + {file = "propcache-0.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e153e9cd40cc8945138822807139367f256f89c6810c2634a4f6902b52d3b4e2"}, + {file = "propcache-0.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cd547953428f7abb73c5ad82cbb32109566204260d98e41e5dfdc682eb7f8403"}, + {file = "propcache-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f048da1b4f243fc44f205dfd320933a951b8d89e0afd4c7cacc762a8b9165207"}, + {file = "propcache-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ec17c65562a827bba85e3872ead335f95405ea1674860d96483a02f5c698fa72"}, + {file = "propcache-0.4.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:405aac25c6394ef275dee4c709be43745d36674b223ba4eb7144bf4d691b7367"}, + {file = "propcache-0.4.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0013cb6f8dde4b2a2f66903b8ba740bdfe378c943c4377a200551ceb27f379e4"}, + {file = "propcache-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15932ab57837c3368b024473a525e25d316d8353016e7cc0e5ba9eb343fbb1cf"}, + {file = "propcache-0.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:031dce78b9dc099f4c29785d9cf5577a3faf9ebf74ecbd3c856a7b92768c3df3"}, + {file = "propcache-0.4.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ab08df6c9a035bee56e31af99be621526bd237bea9f32def431c656b29e41778"}, + {file = "propcache-0.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4d7af63f9f93fe593afbf104c21b3b15868efb2c21d07d8732c0c4287e66b6a6"}, + {file = "propcache-0.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cfc27c945f422e8b5071b6e93169679e4eb5bf73bbcbf1ba3ae3a83d2f78ebd9"}, + {file = "propcache-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:35c3277624a080cc6ec6f847cbbbb5b49affa3598c4535a0a4682a697aaa5c75"}, + {file = "propcache-0.4.1-cp312-cp312-win32.whl", hash = "sha256:671538c2262dadb5ba6395e26c1731e1d52534bfe9ae56d0b5573ce539266aa8"}, + {file = "propcache-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:cb2d222e72399fcf5890d1d5cc1060857b9b236adff2792ff48ca2dfd46c81db"}, + {file = "propcache-0.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:204483131fb222bdaaeeea9f9e6c6ed0cac32731f75dfc1d4a567fc1926477c1"}, + {file = "propcache-0.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:43eedf29202c08550aac1d14e0ee619b0430aaef78f85864c1a892294fbc28cf"}, + {file = "propcache-0.4.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d62cdfcfd89ccb8de04e0eda998535c406bf5e060ffd56be6c586cbcc05b3311"}, + {file = "propcache-0.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cae65ad55793da34db5f54e4029b89d3b9b9490d8abe1b4c7ab5d4b8ec7ebf74"}, + {file = "propcache-0.4.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:333ddb9031d2704a301ee3e506dc46b1fe5f294ec198ed6435ad5b6a085facfe"}, + {file = "propcache-0.4.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:fd0858c20f078a32cf55f7e81473d96dcf3b93fd2ccdb3d40fdf54b8573df3af"}, + {file = 
"propcache-0.4.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:678ae89ebc632c5c204c794f8dab2837c5f159aeb59e6ed0539500400577298c"}, + {file = "propcache-0.4.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d472aeb4fbf9865e0c6d622d7f4d54a4e101a89715d8904282bb5f9a2f476c3f"}, + {file = "propcache-0.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4d3df5fa7e36b3225954fba85589da77a0fe6a53e3976de39caf04a0db4c36f1"}, + {file = "propcache-0.4.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:ee17f18d2498f2673e432faaa71698032b0127ebf23ae5974eeaf806c279df24"}, + {file = "propcache-0.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:580e97762b950f993ae618e167e7be9256b8353c2dcd8b99ec100eb50f5286aa"}, + {file = "propcache-0.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:501d20b891688eb8e7aa903021f0b72d5a55db40ffaab27edefd1027caaafa61"}, + {file = "propcache-0.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a0bd56e5b100aef69bd8562b74b46254e7c8812918d3baa700c8a8009b0af66"}, + {file = "propcache-0.4.1-cp313-cp313-win32.whl", hash = "sha256:bcc9aaa5d80322bc2fb24bb7accb4a30f81e90ab8d6ba187aec0744bc302ad81"}, + {file = "propcache-0.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:381914df18634f5494334d201e98245c0596067504b9372d8cf93f4bb23e025e"}, + {file = "propcache-0.4.1-cp313-cp313-win_arm64.whl", hash = "sha256:8873eb4460fd55333ea49b7d189749ecf6e55bf85080f11b1c4530ed3034cba1"}, + {file = "propcache-0.4.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:92d1935ee1f8d7442da9c0c4fa7ac20d07e94064184811b685f5c4fada64553b"}, + {file = "propcache-0.4.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:473c61b39e1460d386479b9b2f337da492042447c9b685f28be4f74d3529e566"}, + {file = "propcache-0.4.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:c0ef0aaafc66fbd87842a3fe3902fd889825646bc21149eafe47be6072725835"}, + {file = 
"propcache-0.4.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95393b4d66bfae908c3ca8d169d5f79cd65636ae15b5e7a4f6e67af675adb0e"}, + {file = "propcache-0.4.1-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c07fda85708bc48578467e85099645167a955ba093be0a2dcba962195676e859"}, + {file = "propcache-0.4.1-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:af223b406d6d000830c6f65f1e6431783fc3f713ba3e6cc8c024d5ee96170a4b"}, + {file = "propcache-0.4.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a78372c932c90ee474559c5ddfffd718238e8673c340dc21fe45c5b8b54559a0"}, + {file = "propcache-0.4.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:564d9f0d4d9509e1a870c920a89b2fec951b44bf5ba7d537a9e7c1ccec2c18af"}, + {file = "propcache-0.4.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:17612831fda0138059cc5546f4d12a2aacfb9e47068c06af35c400ba58ba7393"}, + {file = "propcache-0.4.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:41a89040cb10bd345b3c1a873b2bf36413d48da1def52f268a055f7398514874"}, + {file = "propcache-0.4.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e35b88984e7fa64aacecea39236cee32dd9bd8c55f57ba8a75cf2399553f9bd7"}, + {file = "propcache-0.4.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f8b465489f927b0df505cbe26ffbeed4d6d8a2bbc61ce90eb074ff129ef0ab1"}, + {file = "propcache-0.4.1-cp313-cp313t-win32.whl", hash = "sha256:2ad890caa1d928c7c2965b48f3a3815c853180831d0e5503d35cf00c472f4717"}, + {file = "propcache-0.4.1-cp313-cp313t-win_amd64.whl", hash = "sha256:f7ee0e597f495cf415bcbd3da3caa3bd7e816b74d0d52b8145954c5e6fd3ff37"}, + {file = "propcache-0.4.1-cp313-cp313t-win_arm64.whl", hash = "sha256:929d7cbe1f01bb7baffb33dc14eb5691c95831450a26354cd210a8155170c93a"}, + {file = 
"propcache-0.4.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3f7124c9d820ba5548d431afb4632301acf965db49e666aa21c305cbe8c6de12"}, + {file = "propcache-0.4.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:c0d4b719b7da33599dfe3b22d3db1ef789210a0597bc650b7cee9c77c2be8c5c"}, + {file = "propcache-0.4.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9f302f4783709a78240ebc311b793f123328716a60911d667e0c036bc5dcbded"}, + {file = "propcache-0.4.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c80ee5802e3fb9ea37938e7eecc307fb984837091d5fd262bb37238b1ae97641"}, + {file = "propcache-0.4.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ed5a841e8bb29a55fb8159ed526b26adc5bdd7e8bd7bf793ce647cb08656cdf4"}, + {file = "propcache-0.4.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:55c72fd6ea2da4c318e74ffdf93c4fe4e926051133657459131a95c846d16d44"}, + {file = "propcache-0.4.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8326e144341460402713f91df60ade3c999d601e7eb5ff8f6f7862d54de0610d"}, + {file = "propcache-0.4.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:060b16ae65bc098da7f6d25bf359f1f31f688384858204fe5d652979e0015e5b"}, + {file = "propcache-0.4.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:89eb3fa9524f7bec9de6e83cf3faed9d79bffa560672c118a96a171a6f55831e"}, + {file = "propcache-0.4.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:dee69d7015dc235f526fe80a9c90d65eb0039103fe565776250881731f06349f"}, + {file = "propcache-0.4.1-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:5558992a00dfd54ccbc64a32726a3357ec93825a418a401f5cc67df0ac5d9e49"}, + {file = "propcache-0.4.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c9b822a577f560fbd9554812526831712c1436d2c046cedee4c3796d3543b144"}, + {file = 
"propcache-0.4.1-cp314-cp314-win32.whl", hash = "sha256:ab4c29b49d560fe48b696cdcb127dd36e0bc2472548f3bf56cc5cb3da2b2984f"}, + {file = "propcache-0.4.1-cp314-cp314-win_amd64.whl", hash = "sha256:5a103c3eb905fcea0ab98be99c3a9a5ab2de60228aa5aceedc614c0281cf6153"}, + {file = "propcache-0.4.1-cp314-cp314-win_arm64.whl", hash = "sha256:74c1fb26515153e482e00177a1ad654721bf9207da8a494a0c05e797ad27b992"}, + {file = "propcache-0.4.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:824e908bce90fb2743bd6b59db36eb4f45cd350a39637c9f73b1c1ea66f5b75f"}, + {file = "propcache-0.4.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c2b5e7db5328427c57c8e8831abda175421b709672f6cfc3d630c3b7e2146393"}, + {file = "propcache-0.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6f6ff873ed40292cd4969ef5310179afd5db59fdf055897e282485043fc80ad0"}, + {file = "propcache-0.4.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:49a2dc67c154db2c1463013594c458881a069fcf98940e61a0569016a583020a"}, + {file = "propcache-0.4.1-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:005f08e6a0529984491e37d8dbc3dd86f84bd78a8ceb5fa9a021f4c48d4984be"}, + {file = "propcache-0.4.1-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5c3310452e0d31390da9035c348633b43d7e7feb2e37be252be6da45abd1abcc"}, + {file = "propcache-0.4.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c3c70630930447f9ef1caac7728c8ad1c56bc5015338b20fed0d08ea2480b3a"}, + {file = "propcache-0.4.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8e57061305815dfc910a3634dcf584f08168a8836e6999983569f51a8544cd89"}, + {file = "propcache-0.4.1-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:521a463429ef54143092c11a77e04056dd00636f72e8c45b70aaa3140d639726"}, + {file = "propcache-0.4.1-cp314-cp314t-musllinux_1_2_ppc64le.whl", 
hash = "sha256:120c964da3fdc75e3731aa392527136d4ad35868cc556fd09bb6d09172d9a367"}, + {file = "propcache-0.4.1-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:d8f353eb14ee3441ee844ade4277d560cdd68288838673273b978e3d6d2c8f36"}, + {file = "propcache-0.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ab2943be7c652f09638800905ee1bab2c544e537edb57d527997a24c13dc1455"}, + {file = "propcache-0.4.1-cp314-cp314t-win32.whl", hash = "sha256:05674a162469f31358c30bcaa8883cb7829fa3110bf9c0991fe27d7896c42d85"}, + {file = "propcache-0.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:990f6b3e2a27d683cb7602ed6c86f15ee6b43b1194736f9baaeb93d0016633b1"}, + {file = "propcache-0.4.1-cp314-cp314t-win_arm64.whl", hash = "sha256:ecef2343af4cc68e05131e45024ba34f6095821988a9d0a02aa7c73fcc448aa9"}, + {file = "propcache-0.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3d233076ccf9e450c8b3bc6720af226b898ef5d051a2d145f7d765e6e9f9bcff"}, + {file = "propcache-0.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:357f5bb5c377a82e105e44bd3d52ba22b616f7b9773714bff93573988ef0a5fb"}, + {file = "propcache-0.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cbc3b6dfc728105b2a57c06791eb07a94229202ea75c59db644d7d496b698cac"}, + {file = "propcache-0.4.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:182b51b421f0501952d938dc0b0eb45246a5b5153c50d42b495ad5fb7517c888"}, + {file = "propcache-0.4.1-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4b536b39c5199b96fc6245eb5fb796c497381d3942f169e44e8e392b29c9ebcc"}, + {file = "propcache-0.4.1-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:db65d2af507bbfbdcedb254a11149f894169d90488dd3e7190f7cdcb2d6cd57a"}, + {file = "propcache-0.4.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd2dbc472da1f772a4dae4fa24be938a6c544671a912e30529984dd80400cd88"}, 
+ {file = "propcache-0.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:daede9cd44e0f8bdd9e6cc9a607fc81feb80fae7a5fc6cecaff0e0bb32e42d00"}, + {file = "propcache-0.4.1-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:71b749281b816793678ae7f3d0d84bd36e694953822eaad408d682efc5ca18e0"}, + {file = "propcache-0.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:0002004213ee1f36cfb3f9a42b5066100c44276b9b72b4e1504cddd3d692e86e"}, + {file = "propcache-0.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:fe49d0a85038f36ba9e3ffafa1103e61170b28e95b16622e11be0a0ea07c6781"}, + {file = "propcache-0.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:99d43339c83aaf4d32bda60928231848eee470c6bda8d02599cc4cebe872d183"}, + {file = "propcache-0.4.1-cp39-cp39-win32.whl", hash = "sha256:a129e76735bc792794d5177069691c3217898b9f5cee2b2661471e52ffe13f19"}, + {file = "propcache-0.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:948dab269721ae9a87fd16c514a0a2c2a1bdb23a9a61b969b0f9d9ee2968546f"}, + {file = "propcache-0.4.1-cp39-cp39-win_arm64.whl", hash = "sha256:5fd37c406dd6dc85aa743e214cef35dc54bbdd1419baac4f6ae5e5b1a2976938"}, + {file = "propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237"}, + {file = "propcache-0.4.1.tar.gz", hash = "sha256:f48107a8c637e80362555f37ecf49abe20370e557cc4ab374f04ec4423c97c3d"}, +] + +[[package]] +name = "pycodestyle" +version = "2.11.1" +description = "Python style guide checker" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, + {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, +] + +[[package]] +name = "pycoin" +version = "0.92.20241201" +description = "Utilities for Bitcoin and altcoin addresses and transaction manipulation." 
+optional = false +python-versions = "*" +files = [ + {file = "pycoin-0.92.20241201.tar.gz", hash = "sha256:6e937be181573ccf02b35064844bec46de130386b45f3df196d3074a8c790512"}, +] + +[[package]] +name = "pycparser" +version = "2.23" +description = "C parser in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934"}, + {file = "pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2"}, +] + +[[package]] +name = "pyflakes" +version = "3.2.0" +description = "passive checker of Python programs" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"}, + {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, +] + +[[package]] +name = "pytest" +version = "8.1.2" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-8.1.2-py3-none-any.whl", hash = "sha256:6c06dc309ff46a05721e6fd48e492a775ed8165d2ecdf57f156a80c7e95bb142"}, + {file = "pytest-8.1.2.tar.gz", hash = "sha256:f3c45d1d5eed96b01a2aea70dee6a4a366d51d38f9957768083e4fecfc77f3ef"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.4,<2.0" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-cov" +version = "5.0.0" +description = "Pytest plugin for measuring coverage." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"}, + {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] + +[[package]] +name = "structlog" +version = "22.3.0" +description = "Structured Logging for Python" +optional = true +python-versions = ">=3.7" +files = [ + {file = "structlog-22.3.0-py3-none-any.whl", hash = "sha256:b403f344f902b220648fa9f286a23c0cc5439a5844d271fec40562dbadbc70ad"}, + {file = "structlog-22.3.0.tar.gz", hash = "sha256:e7509391f215e4afb88b1b80fa3ea074be57a5a17d794bd436a5c949da023333"}, +] + +[package.extras] +dev = ["structlog[docs,tests,typing]"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-mermaid", "twisted"] +tests = ["coverage[toml]", "freezegun (>=0.2.8)", "pretend", "pytest (>=6.0)", "pytest-asyncio (>=0.17)", "simplejson"] +typing = ["mypy", "rich", "twisted"] + +[[package]] +name = "tomli" +version = "2.3.0" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tomli-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:88bd15eb972f3664f5ed4b57c1634a97153b4bac4479dcb6a495f41921eb7f45"}, + {file = "tomli-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:883b1c0d6398a6a9d29b508c331fa56adbcdff647f6ace4dfca0f50e90dfd0ba"}, + {file = "tomli-2.3.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1381caf13ab9f300e30dd8feadb3de072aeb86f1d34a8569453ff32a7dea4bf"}, + {file = "tomli-2.3.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:a0e285d2649b78c0d9027570d4da3425bdb49830a6156121360b3f8511ea3441"}, + {file = "tomli-2.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a154a9ae14bfcf5d8917a59b51ffd5a3ac1fd149b71b47a3a104ca4edcfa845"}, + {file = "tomli-2.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:74bf8464ff93e413514fefd2be591c3b0b23231a77f901db1eb30d6f712fc42c"}, + {file = "tomli-2.3.0-cp311-cp311-win32.whl", hash = "sha256:00b5f5d95bbfc7d12f91ad8c593a1659b6387b43f054104cda404be6bda62456"}, + {file = "tomli-2.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:4dc4ce8483a5d429ab602f111a93a6ab1ed425eae3122032db7e9acf449451be"}, + {file = "tomli-2.3.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7d86942e56ded512a594786a5ba0a5e521d02529b3826e7761a05138341a2ac"}, + {file = "tomli-2.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:73ee0b47d4dad1c5e996e3cd33b8a76a50167ae5f96a2607cbe8cc773506ab22"}, + {file = "tomli-2.3.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:792262b94d5d0a466afb5bc63c7daa9d75520110971ee269152083270998316f"}, + {file = "tomli-2.3.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f195fe57ecceac95a66a75ac24d9d5fbc98ef0962e09b2eddec5d39375aae52"}, + {file = "tomli-2.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e31d432427dcbf4d86958c184b9bfd1e96b5b71f8eb17e6d02531f434fd335b8"}, + {file = "tomli-2.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b0882799624980785240ab732537fcfc372601015c00f7fc367c55308c186f6"}, + {file = "tomli-2.3.0-cp312-cp312-win32.whl", hash = "sha256:ff72b71b5d10d22ecb084d345fc26f42b5143c5533db5e2eaba7d2d335358876"}, + {file = "tomli-2.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:1cb4ed918939151a03f33d4242ccd0aa5f11b3547d0cf30f7c74a408a5b99878"}, + {file = "tomli-2.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5192f562738228945d7b13d4930baffda67b69425a7f0da96d360b0a3888136b"}, + 
{file = "tomli-2.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:be71c93a63d738597996be9528f4abe628d1adf5e6eb11607bc8fe1a510b5dae"}, + {file = "tomli-2.3.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4665508bcbac83a31ff8ab08f424b665200c0e1e645d2bd9ab3d3e557b6185b"}, + {file = "tomli-2.3.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4021923f97266babc6ccab9f5068642a0095faa0a51a246a6a02fccbb3514eaf"}, + {file = "tomli-2.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4ea38c40145a357d513bffad0ed869f13c1773716cf71ccaa83b0fa0cc4e42f"}, + {file = "tomli-2.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ad805ea85eda330dbad64c7ea7a4556259665bdf9d2672f5dccc740eb9d3ca05"}, + {file = "tomli-2.3.0-cp313-cp313-win32.whl", hash = "sha256:97d5eec30149fd3294270e889b4234023f2c69747e555a27bd708828353ab606"}, + {file = "tomli-2.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:0c95ca56fbe89e065c6ead5b593ee64b84a26fca063b5d71a1122bf26e533999"}, + {file = "tomli-2.3.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:cebc6fe843e0733ee827a282aca4999b596241195f43b4cc371d64fc6639da9e"}, + {file = "tomli-2.3.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4c2ef0244c75aba9355561272009d934953817c49f47d768070c3c94355c2aa3"}, + {file = "tomli-2.3.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c22a8bf253bacc0cf11f35ad9808b6cb75ada2631c2d97c971122583b129afbc"}, + {file = "tomli-2.3.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0eea8cc5c5e9f89c9b90c4896a8deefc74f518db5927d0e0e8d4a80953d774d0"}, + {file = "tomli-2.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b74a0e59ec5d15127acdabd75ea17726ac4c5178ae51b85bfe39c4f8a278e879"}, + {file = "tomli-2.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = 
"sha256:b5870b50c9db823c595983571d1296a6ff3e1b88f734a4c8f6fc6188397de005"}, + {file = "tomli-2.3.0-cp314-cp314-win32.whl", hash = "sha256:feb0dacc61170ed7ab602d3d972a58f14ee3ee60494292d384649a3dc38ef463"}, + {file = "tomli-2.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:b273fcbd7fc64dc3600c098e39136522650c49bca95df2d11cf3b626422392c8"}, + {file = "tomli-2.3.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:940d56ee0410fa17ee1f12b817b37a4d4e4dc4d27340863cc67236c74f582e77"}, + {file = "tomli-2.3.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f85209946d1fe94416debbb88d00eb92ce9cd5266775424ff81bc959e001acaf"}, + {file = "tomli-2.3.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a56212bdcce682e56b0aaf79e869ba5d15a6163f88d5451cbde388d48b13f530"}, + {file = "tomli-2.3.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5f3ffd1e098dfc032d4d3af5c0ac64f6d286d98bc148698356847b80fa4de1b"}, + {file = "tomli-2.3.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5e01decd096b1530d97d5d85cb4dff4af2d8347bd35686654a004f8dea20fc67"}, + {file = "tomli-2.3.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8a35dd0e643bb2610f156cca8db95d213a90015c11fee76c946aa62b7ae7e02f"}, + {file = "tomli-2.3.0-cp314-cp314t-win32.whl", hash = "sha256:a1f7f282fe248311650081faafa5f4732bdbfef5d45fe3f2e702fbc6f2d496e0"}, + {file = "tomli-2.3.0-cp314-cp314t-win_amd64.whl", hash = "sha256:70a251f8d4ba2d9ac2542eecf008b3c8a9fc5c3f9f02c56a9d7952612be2fdba"}, + {file = "tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b"}, + {file = "tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549"}, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +description = "Backported and Experimental Type Hints for Python 3.9+" +optional = false +python-versions = ">=3.9" +files 
= [ + {file = "typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548"}, + {file = "typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466"}, +] + +[[package]] +name = "yarl" +version = "1.22.0" +description = "Yet another URL library" +optional = true +python-versions = ">=3.9" +files = [ + {file = "yarl-1.22.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:c7bd6683587567e5a49ee6e336e0612bec8329be1b7d4c8af5687dcdeb67ee1e"}, + {file = "yarl-1.22.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5cdac20da754f3a723cceea5b3448e1a2074866406adeb4ef35b469d089adb8f"}, + {file = "yarl-1.22.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:07a524d84df0c10f41e3ee918846e1974aba4ec017f990dc735aad487a0bdfdf"}, + {file = "yarl-1.22.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e1b329cb8146d7b736677a2440e422eadd775d1806a81db2d4cded80a48efc1a"}, + {file = "yarl-1.22.0-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:75976c6945d85dbb9ee6308cd7ff7b1fb9409380c82d6119bd778d8fcfe2931c"}, + {file = "yarl-1.22.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:80ddf7a5f8c86cb3eb4bc9028b07bbbf1f08a96c5c0bc1244be5e8fefcb94147"}, + {file = "yarl-1.22.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d332fc2e3c94dad927f2112395772a4e4fedbcf8f80efc21ed7cdfae4d574fdb"}, + {file = "yarl-1.22.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0cf71bf877efeac18b38d3930594c0948c82b64547c1cf420ba48722fe5509f6"}, + {file = "yarl-1.22.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:663e1cadaddae26be034a6ab6072449a8426ddb03d500f43daf952b74553bba0"}, + {file = 
"yarl-1.22.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:6dcbb0829c671f305be48a7227918cfcd11276c2d637a8033a99a02b67bf9eda"}, + {file = "yarl-1.22.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:f0d97c18dfd9a9af4490631905a3f131a8e4c9e80a39353919e2cfed8f00aedc"}, + {file = "yarl-1.22.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:437840083abe022c978470b942ff832c3940b2ad3734d424b7eaffcd07f76737"}, + {file = "yarl-1.22.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a899cbd98dce6f5d8de1aad31cb712ec0a530abc0a86bd6edaa47c1090138467"}, + {file = "yarl-1.22.0-cp310-cp310-win32.whl", hash = "sha256:595697f68bd1f0c1c159fcb97b661fc9c3f5db46498043555d04805430e79bea"}, + {file = "yarl-1.22.0-cp310-cp310-win_amd64.whl", hash = "sha256:cb95a9b1adaa48e41815a55ae740cfda005758104049a640a398120bf02515ca"}, + {file = "yarl-1.22.0-cp310-cp310-win_arm64.whl", hash = "sha256:b85b982afde6df99ecc996990d4ad7ccbdbb70e2a4ba4de0aecde5922ba98a0b"}, + {file = "yarl-1.22.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1ab72135b1f2db3fed3997d7e7dc1b80573c67138023852b6efb336a5eae6511"}, + {file = "yarl-1.22.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:669930400e375570189492dc8d8341301578e8493aec04aebc20d4717f899dd6"}, + {file = "yarl-1.22.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:792a2af6d58177ef7c19cbf0097aba92ca1b9cb3ffdd9c7470e156c8f9b5e028"}, + {file = "yarl-1.22.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3ea66b1c11c9150f1372f69afb6b8116f2dd7286f38e14ea71a44eee9ec51b9d"}, + {file = "yarl-1.22.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3e2daa88dc91870215961e96a039ec73e4937da13cf77ce17f9cad0c18df3503"}, + {file = "yarl-1.22.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ba440ae430c00eee41509353628600212112cd5018d5def7e9b05ea7ac34eb65"}, + {file = 
"yarl-1.22.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e6438cc8f23a9c1478633d216b16104a586b9761db62bfacb6425bac0a36679e"}, + {file = "yarl-1.22.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c52a6e78aef5cf47a98ef8e934755abf53953379b7d53e68b15ff4420e6683d"}, + {file = "yarl-1.22.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3b06bcadaac49c70f4c88af4ffcfbe3dc155aab3163e75777818092478bcbbe7"}, + {file = "yarl-1.22.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:6944b2dc72c4d7f7052683487e3677456050ff77fcf5e6204e98caf785ad1967"}, + {file = "yarl-1.22.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:d5372ca1df0f91a86b047d1277c2aaf1edb32d78bbcefffc81b40ffd18f027ed"}, + {file = "yarl-1.22.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:51af598701f5299012b8416486b40fceef8c26fc87dc6d7d1f6fc30609ea0aa6"}, + {file = "yarl-1.22.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b266bd01fedeffeeac01a79ae181719ff848a5a13ce10075adbefc8f1daee70e"}, + {file = "yarl-1.22.0-cp311-cp311-win32.whl", hash = "sha256:a9b1ba5610a4e20f655258d5a1fdc7ebe3d837bb0e45b581398b99eb98b1f5ca"}, + {file = "yarl-1.22.0-cp311-cp311-win_amd64.whl", hash = "sha256:078278b9b0b11568937d9509b589ee83ef98ed6d561dfe2020e24a9fd08eaa2b"}, + {file = "yarl-1.22.0-cp311-cp311-win_arm64.whl", hash = "sha256:b6a6f620cfe13ccec221fa312139135166e47ae169f8253f72a0abc0dae94376"}, + {file = "yarl-1.22.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e340382d1afa5d32b892b3ff062436d592ec3d692aeea3bef3a5cfe11bbf8c6f"}, + {file = "yarl-1.22.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f1e09112a2c31ffe8d80be1b0988fa6a18c5d5cad92a9ffbb1c04c91bfe52ad2"}, + {file = "yarl-1.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:939fe60db294c786f6b7c2d2e121576628468f65453d86b0fe36cb52f987bd74"}, + {file = 
"yarl-1.22.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e1651bf8e0398574646744c1885a41198eba53dc8a9312b954073f845c90a8df"}, + {file = "yarl-1.22.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b8a0588521a26bf92a57a1705b77b8b59044cdceccac7151bd8d229e66b8dedb"}, + {file = "yarl-1.22.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:42188e6a615c1a75bcaa6e150c3fe8f3e8680471a6b10150c5f7e83f47cc34d2"}, + {file = "yarl-1.22.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f6d2cb59377d99718913ad9a151030d6f83ef420a2b8f521d94609ecc106ee82"}, + {file = "yarl-1.22.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50678a3b71c751d58d7908edc96d332af328839eea883bb554a43f539101277a"}, + {file = "yarl-1.22.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e8fbaa7cec507aa24ea27a01456e8dd4b6fab829059b69844bd348f2d467124"}, + {file = "yarl-1.22.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:433885ab5431bc3d3d4f2f9bd15bfa1614c522b0f1405d62c4f926ccd69d04fa"}, + {file = "yarl-1.22.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:b790b39c7e9a4192dc2e201a282109ed2985a1ddbd5ac08dc56d0e121400a8f7"}, + {file = "yarl-1.22.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:31f0b53913220599446872d757257be5898019c85e7971599065bc55065dc99d"}, + {file = "yarl-1.22.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a49370e8f711daec68d09b821a34e1167792ee2d24d405cbc2387be4f158b520"}, + {file = "yarl-1.22.0-cp312-cp312-win32.whl", hash = "sha256:70dfd4f241c04bd9239d53b17f11e6ab672b9f1420364af63e8531198e3f5fe8"}, + {file = "yarl-1.22.0-cp312-cp312-win_amd64.whl", hash = "sha256:8884d8b332a5e9b88e23f60bb166890009429391864c685e17bd73a9eda9105c"}, + {file = "yarl-1.22.0-cp312-cp312-win_arm64.whl", hash = 
"sha256:ea70f61a47f3cc93bdf8b2f368ed359ef02a01ca6393916bc8ff877427181e74"}, + {file = "yarl-1.22.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8dee9c25c74997f6a750cd317b8ca63545169c098faee42c84aa5e506c819b53"}, + {file = "yarl-1.22.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:01e73b85a5434f89fc4fe27dcda2aff08ddf35e4d47bbbea3bdcd25321af538a"}, + {file = "yarl-1.22.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:22965c2af250d20c873cdbee8ff958fb809940aeb2e74ba5f20aaf6b7ac8c70c"}, + {file = "yarl-1.22.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b4f15793aa49793ec8d1c708ab7f9eded1aa72edc5174cae703651555ed1b601"}, + {file = "yarl-1.22.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5542339dcf2747135c5c85f68680353d5cb9ffd741c0f2e8d832d054d41f35a"}, + {file = "yarl-1.22.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5c401e05ad47a75869c3ab3e35137f8468b846770587e70d71e11de797d113df"}, + {file = "yarl-1.22.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:243dda95d901c733f5b59214d28b0120893d91777cb8aa043e6ef059d3cddfe2"}, + {file = "yarl-1.22.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bec03d0d388060058f5d291a813f21c011041938a441c593374da6077fe21b1b"}, + {file = "yarl-1.22.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b0748275abb8c1e1e09301ee3cf90c8a99678a4e92e4373705f2a2570d581273"}, + {file = "yarl-1.22.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:47fdb18187e2a4e18fda2c25c05d8251a9e4a521edaed757fef033e7d8498d9a"}, + {file = "yarl-1.22.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c7044802eec4524fde550afc28edda0dd5784c4c45f0be151a2d3ba017daca7d"}, + {file = "yarl-1.22.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = 
"sha256:139718f35149ff544caba20fce6e8a2f71f1e39b92c700d8438a0b1d2a631a02"}, + {file = "yarl-1.22.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e1b51bebd221006d3d2f95fbe124b22b247136647ae5dcc8c7acafba66e5ee67"}, + {file = "yarl-1.22.0-cp313-cp313-win32.whl", hash = "sha256:d3e32536234a95f513bd374e93d717cf6b2231a791758de6c509e3653f234c95"}, + {file = "yarl-1.22.0-cp313-cp313-win_amd64.whl", hash = "sha256:47743b82b76d89a1d20b83e60d5c20314cbd5ba2befc9cda8f28300c4a08ed4d"}, + {file = "yarl-1.22.0-cp313-cp313-win_arm64.whl", hash = "sha256:5d0fcda9608875f7d052eff120c7a5da474a6796fe4d83e152e0e4d42f6d1a9b"}, + {file = "yarl-1.22.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:719ae08b6972befcba4310e49edb1161a88cdd331e3a694b84466bd938a6ab10"}, + {file = "yarl-1.22.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:47d8a5c446df1c4db9d21b49619ffdba90e77c89ec6e283f453856c74b50b9e3"}, + {file = "yarl-1.22.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cfebc0ac8333520d2d0423cbbe43ae43c8838862ddb898f5ca68565e395516e9"}, + {file = "yarl-1.22.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4398557cbf484207df000309235979c79c4356518fd5c99158c7d38203c4da4f"}, + {file = "yarl-1.22.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2ca6fd72a8cd803be290d42f2dec5cdcd5299eeb93c2d929bf060ad9efaf5de0"}, + {file = "yarl-1.22.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca1f59c4e1ab6e72f0a23c13fca5430f889634166be85dbf1013683e49e3278e"}, + {file = "yarl-1.22.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6c5010a52015e7c70f86eb967db0f37f3c8bd503a695a49f8d45700144667708"}, + {file = "yarl-1.22.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:9d7672ecf7557476642c88497c2f8d8542f8e36596e928e9bcba0e42e1e7d71f"}, + {file = "yarl-1.22.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3b7c88eeef021579d600e50363e0b6ee4f7f6f728cd3486b9d0f3ee7b946398d"}, + {file = "yarl-1.22.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:f4afb5c34f2c6fecdcc182dfcfc6af6cccf1aa923eed4d6a12e9d96904e1a0d8"}, + {file = "yarl-1.22.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:59c189e3e99a59cf8d83cbb31d4db02d66cda5a1a4374e8a012b51255341abf5"}, + {file = "yarl-1.22.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:5a3bf7f62a289fa90f1990422dc8dff5a458469ea71d1624585ec3a4c8d6960f"}, + {file = "yarl-1.22.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:de6b9a04c606978fdfe72666fa216ffcf2d1a9f6a381058d4378f8d7b1e5de62"}, + {file = "yarl-1.22.0-cp313-cp313t-win32.whl", hash = "sha256:1834bb90991cc2999f10f97f5f01317f99b143284766d197e43cd5b45eb18d03"}, + {file = "yarl-1.22.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ff86011bd159a9d2dfc89c34cfd8aff12875980e3bd6a39ff097887520e60249"}, + {file = "yarl-1.22.0-cp313-cp313t-win_arm64.whl", hash = "sha256:7861058d0582b847bc4e3a4a4c46828a410bca738673f35a29ba3ca5db0b473b"}, + {file = "yarl-1.22.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:34b36c2c57124530884d89d50ed2c1478697ad7473efd59cfd479945c95650e4"}, + {file = "yarl-1.22.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:0dd9a702591ca2e543631c2a017e4a547e38a5c0f29eece37d9097e04a7ac683"}, + {file = "yarl-1.22.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:594fcab1032e2d2cc3321bb2e51271e7cd2b516c7d9aee780ece81b07ff8244b"}, + {file = "yarl-1.22.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f3d7a87a78d46a2e3d5b72587ac14b4c16952dd0887dbb051451eceac774411e"}, + {file = "yarl-1.22.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:852863707010316c973162e703bddabec35e8757e67fcb8ad58829de1ebc8590"}, + {file = "yarl-1.22.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:131a085a53bfe839a477c0845acf21efc77457ba2bcf5899618136d64f3303a2"}, + {file = "yarl-1.22.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:078a8aefd263f4d4f923a9677b942b445a2be970ca24548a8102689a3a8ab8da"}, + {file = "yarl-1.22.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bca03b91c323036913993ff5c738d0842fc9c60c4648e5c8d98331526df89784"}, + {file = "yarl-1.22.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:68986a61557d37bb90d3051a45b91fa3d5c516d177dfc6dd6f2f436a07ff2b6b"}, + {file = "yarl-1.22.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:4792b262d585ff0dff6bcb787f8492e40698443ec982a3568c2096433660c694"}, + {file = "yarl-1.22.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:ebd4549b108d732dba1d4ace67614b9545b21ece30937a63a65dd34efa19732d"}, + {file = "yarl-1.22.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f87ac53513d22240c7d59203f25cc3beac1e574c6cd681bbfd321987b69f95fd"}, + {file = "yarl-1.22.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:22b029f2881599e2f1b06f8f1db2ee63bd309e2293ba2d566e008ba12778b8da"}, + {file = "yarl-1.22.0-cp314-cp314-win32.whl", hash = "sha256:6a635ea45ba4ea8238463b4f7d0e721bad669f80878b7bfd1f89266e2ae63da2"}, + {file = "yarl-1.22.0-cp314-cp314-win_amd64.whl", hash = "sha256:0d6e6885777af0f110b0e5d7e5dda8b704efed3894da26220b7f3d887b839a79"}, + {file = "yarl-1.22.0-cp314-cp314-win_arm64.whl", hash = "sha256:8218f4e98d3c10d683584cb40f0424f4b9fd6e95610232dd75e13743b070ee33"}, + {file = "yarl-1.22.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:45c2842ff0e0d1b35a6bf1cd6c690939dacb617a70827f715232b2e0494d55d1"}, + {file = "yarl-1.22.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = 
"sha256:d947071e6ebcf2e2bee8fce76e10faca8f7a14808ca36a910263acaacef08eca"}, + {file = "yarl-1.22.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:334b8721303e61b00019474cc103bdac3d7b1f65e91f0bfedeec2d56dfe74b53"}, + {file = "yarl-1.22.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1e7ce67c34138a058fd092f67d07a72b8e31ff0c9236e751957465a24b28910c"}, + {file = "yarl-1.22.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d77e1b2c6d04711478cb1c4ab90db07f1609ccf06a287d5607fcd90dc9863acf"}, + {file = "yarl-1.22.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4647674b6150d2cae088fc07de2738a84b8bcedebef29802cf0b0a82ab6face"}, + {file = "yarl-1.22.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:efb07073be061c8f79d03d04139a80ba33cbd390ca8f0297aae9cce6411e4c6b"}, + {file = "yarl-1.22.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e51ac5435758ba97ad69617e13233da53908beccc6cfcd6c34bbed8dcbede486"}, + {file = "yarl-1.22.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:33e32a0dd0c8205efa8e83d04fc9f19313772b78522d1bdc7d9aed706bfd6138"}, + {file = "yarl-1.22.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:bf4a21e58b9cde0e401e683ebd00f6ed30a06d14e93f7c8fd059f8b6e8f87b6a"}, + {file = "yarl-1.22.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:e4b582bab49ac33c8deb97e058cd67c2c50dac0dd134874106d9c774fd272529"}, + {file = "yarl-1.22.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:0b5bcc1a9c4839e7e30b7b30dd47fe5e7e44fb7054ec29b5bb8d526aa1041093"}, + {file = "yarl-1.22.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:c0232bce2170103ec23c454e54a57008a9a72b5d1c3105dc2496750da8cfa47c"}, + {file = "yarl-1.22.0-cp314-cp314t-win32.whl", hash = 
"sha256:8009b3173bcd637be650922ac455946197d858b3630b6d8787aa9e5c4564533e"}, + {file = "yarl-1.22.0-cp314-cp314t-win_amd64.whl", hash = "sha256:9fb17ea16e972c63d25d4a97f016d235c78dd2344820eb35bc034bc32012ee27"}, + {file = "yarl-1.22.0-cp314-cp314t-win_arm64.whl", hash = "sha256:9f6d73c1436b934e3f01df1e1b21ff765cd1d28c77dfb9ace207f746d4610ee1"}, + {file = "yarl-1.22.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3aa27acb6de7a23785d81557577491f6c38a5209a254d1191519d07d8fe51748"}, + {file = "yarl-1.22.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:af74f05666a5e531289cb1cc9c883d1de2088b8e5b4de48004e5ca8a830ac859"}, + {file = "yarl-1.22.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:62441e55958977b8167b2709c164c91a6363e25da322d87ae6dd9c6019ceecf9"}, + {file = "yarl-1.22.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b580e71cac3f8113d3135888770903eaf2f507e9421e5697d6ee6d8cd1c7f054"}, + {file = "yarl-1.22.0-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e81fda2fb4a07eda1a2252b216aa0df23ebcd4d584894e9612e80999a78fd95b"}, + {file = "yarl-1.22.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:99b6fc1d55782461b78221e95fc357b47ad98b041e8e20f47c1411d0aacddc60"}, + {file = "yarl-1.22.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:088e4e08f033db4be2ccd1f34cf29fe994772fb54cfe004bbf54db320af56890"}, + {file = "yarl-1.22.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2e4e1f6f0b4da23e61188676e3ed027ef0baa833a2e633c29ff8530800edccba"}, + {file = "yarl-1.22.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:84fc3ec96fce86ce5aa305eb4aa9358279d1aa644b71fab7b8ed33fe3ba1a7ca"}, + {file = "yarl-1.22.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:5dbeefd6ca588b33576a01b0ad58aa934bc1b41ef89dee505bf2932b22ddffba"}, + 
{file = "yarl-1.22.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:14291620375b1060613f4aab9ebf21850058b6b1b438f386cc814813d901c60b"}, + {file = "yarl-1.22.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:a4fcfc8eb2c34148c118dfa02e6427ca278bfd0f3df7c5f99e33d2c0e81eae3e"}, + {file = "yarl-1.22.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:029866bde8d7b0878b9c160e72305bbf0a7342bcd20b9999381704ae03308dc8"}, + {file = "yarl-1.22.0-cp39-cp39-win32.whl", hash = "sha256:4dcc74149ccc8bba31ce1944acee24813e93cfdee2acda3c172df844948ddf7b"}, + {file = "yarl-1.22.0-cp39-cp39-win_amd64.whl", hash = "sha256:10619d9fdee46d20edc49d3479e2f8269d0779f1b031e6f7c2aa1c76be04b7ed"}, + {file = "yarl-1.22.0-cp39-cp39-win_arm64.whl", hash = "sha256:dd7afd3f8b0bfb4e0d9fc3c31bfe8a4ec7debe124cfd90619305def3c8ca8cd2"}, + {file = "yarl-1.22.0-py3-none-any.whl", hash = "sha256:1380560bdba02b6b6c90de54133c81c9f2a453dee9912fe58c1dcced1edb7cff"}, + {file = "yarl-1.22.0.tar.gz", hash = "sha256:bebf8557577d4401ba8bd9ff33906f1376c877aa78d1fe216ad01b4d6745af71"}, +] + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" +propcache = ">=0.2.1" + +[extras] +client = ["aiohttp", "structlog"] + +[metadata] +lock-version = "2.0" +python-versions = ">=3.9,<4" +content-hash = "c767f164bbb834257be64e95379045828434e7224a9b43b4e565f78a0f25c78f" diff --git a/hathorlib/pyproject.toml b/hathorlib/pyproject.toml new file mode 100644 index 000000000..e3c5f9af6 --- /dev/null +++ b/hathorlib/pyproject.toml @@ -0,0 +1,81 @@ +# Copyright 2020 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +[tool.poetry] +name = "hathorlib" +version = "0.14.0" +description = "Hathor Network base objects library" +authors = ["Hathor Team "] +license = "Apache-2.0" +readme = "README.md" +homepage = "https://hathor.network/" +repository = "https://github.com/HathorNetwork/python-hathorlib/" +# https://pypi.org/classifiers/ +classifiers = [ + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Operating System :: OS Independent", + "License :: OSI Approved :: Apache Software License", +] +include = ["hathorlib/py.typed"] +exclude = ["tests", "tests.*"] + +[tool.poetry.dependencies] +python = ">=3.9,<4" +base58 = "~2.1.1" +structlog = {version = "~22.3.0", optional = true} +aiohttp = {version = "~3.9.3", optional = true} +cryptography = "~42.0.5" +pycoin = "~0.92" + +[tool.poetry.dev-dependencies] +isort = {version = "~5.13.2", extras = ["colors"]} +mypy = {version = "^1.9.0", markers = "implementation_name == 'cpython'"} +pytest = "~8.1.1" +pytest-cov = "~5.0.0" +flake8 = "~7.0.0" + +[tool.poetry.extras] +client = ["aiohttp", "structlog"] + +[tool.isort] +combine_as_imports = true +default_section = "THIRDPARTY" +include_trailing_comma = true +known_first_party = "hathorlib,tests" +line_length = 119 +multi_line_output = 3 + +[tool.mypy] +pretty = true +disallow_incomplete_defs = true +no_implicit_optional = true +extra_checks = true +disallow_untyped_decorators = true +warn_redundant_casts = true +warn_unused_configs = true +warn_unused_ignores = true +namespace_packages = true +show_error_codes = true +show_error_context = true + +[tool.pytest.ini_options] +minversion = "6.0" +testpaths = ["tests"] + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/hathorlib/setup.cfg b/hathorlib/setup.cfg new 
file mode 100644 index 000000000..791f075d0 --- /dev/null +++ b/hathorlib/setup.cfg @@ -0,0 +1,2 @@ +[flake8] +max-line-length = 119 diff --git a/hathorlib/tests/__init__.py b/hathorlib/tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/hathorlib/tests/test_basic.py b/hathorlib/tests/test_basic.py new file mode 100644 index 000000000..0495ad3b5 --- /dev/null +++ b/hathorlib/tests/test_basic.py @@ -0,0 +1,271 @@ +""" +Copyright (c) Hathor Labs and its affiliates. + +This source code is licensed under the MIT license found in the +LICENSE file in the root directory of this source tree. +""" + +import unittest + +from hathorlib import Block, TokenCreationTransaction, Transaction +from hathorlib.base_transaction import tx_or_block_from_bytes +from hathorlib.conf import HathorSettings +from hathorlib.scripts import create_output_script +from hathorlib.utils import decode_address + +settings = HathorSettings() + + +class HathorCommonsTestCase(unittest.TestCase): + def test_block_basics(self): + data = bytes.fromhex('000001ffffffe8b789180000001976a9147fd4ae0e4fb2d2854e76d359029d8078bb9' + '9649e88ac40350000000000005e0f84a9000000000000000000000000000000278a7e') + block = Block.create_from_struct(data) + self.assertTrue(block.verify_pow()) + self.assertEqual(data, bytes(block)) + + # These prints are here to test the methods. 
+ self.assertEqual( + str(block), + 'Block(nonce=2591358, timestamp=1578075305, version=0, weight=21.000000, ' + 'hash=000006cb93385b8b87a545a1cbb6197e6caff600c12cc12fc54250d39c8088fc)' + ) + self.assertEqual( + repr(block), + 'Block(nonce=2591358, timestamp=1578075305, version=0, weight=21.000000, ' + 'hash=000006cb93385b8b87a545a1cbb6197e6caff600c12cc12fc54250d39c8088fc, ' + 'inputs=[], outputs=[TxOutput(token_data=0b0, value=100000000000)], parents=[], data=)') + self.assertEqual(block.get_struct_nonce().hex(), '00000000000000000000000000278a7e') + + block.nonce += 1 + block.update_hash() + self.assertFalse(block.verify_pow()) + + def test_tx_basics(self): + data = bytes.fromhex('0001000102000001e0e88216036e4e52872ba60a96df7570c3e29cc30eda6dd92ea0fd' + '304c00006a4730450221009fa4798bb69f66035013063c13f1a970ec58111bcead277d' + '9c93e45c2b6885fe022012e039b26cc4a4cb0a8a5abb7deb7bb78610ed362bf422efa2' + '47db37c5a841e12102bc1213ea99ab55effcff760f94c09f8b1a0b7b990c01128d06b4' + 'a8c5c5f41f8400089f0800001976a91438fb3bc92b76819e9c19ef7c079d327c8fcd19' + '9288ac02de2d3800001976a9148d880c42ddcf78a2da5d06558f13515508720b4088ac' + '403518509c63f9195ecfd7d40200001ea9d6e1d31da6893fcec594dc3fa8b6819ae126' + '8c190f7a1441302226e2000007d1c5add7b9085037cfc591f1008dff4fe8a9158fd1a4' + '840a6dd5d4e4e600d2da8d') + tx = Transaction.create_from_struct(data) + + self.assertEqual(data, bytes(tx)) + self.assertTrue(tx.verify_pow()) + self.assertTrue(tx.is_transaction) + self.assertFalse(tx.is_block) + + # These prints are here to test the methods. 
+ print(str(tx)) + print(repr(tx)) + + tx.nonce += 1 + tx.update_hash() + self.assertFalse(tx.verify_pow()) + + def test_token_creation_basics(self): + data = bytes.fromhex('00020104000005551d7740fd7d3c0acc50b5677fdd844f1225985aa431e1712af2a2fd' + '8900006a473045022100a445edb5cd6c79a0a7b5ed837582fd65b8d511ee60b64fd076' + 'e07bd8f63f75a202202dca24320bffc4c3ca2a07cdfff38f7c839bde70ed49ef634ac6' + '588972836cab2103bfa995d676e3c0ed7b863c74cfef9683fab3163b42b6f21442326a' + '023fc57fba0000264800001976a9146876f9578221fdb678d4e8376503098a9228b132' + '88ac00004e2001001976a914031761ef85a24603203c97e75af355b83209f08f88ac00' + '00000181001976a9149f091256cb98649c7c35df0aad44d7805710691e88ac00000002' + '81001976a914b1d7a5ee505ad4d3b93ea1a5162ba83d5049ec4e88ac0109546f546865' + '4d6f6f6e04f09f9a804034a52aec6cece75e0fc0e30200001a72272f48339fcc5d5ec5' + 'deaf197855964b0eb912e8c6eefe00928b6cf600001055641c20b71871ed2c5c7d4096' + 'a34f40888d79c25bce74421646e732dc01ff7369') + tx = TokenCreationTransaction.create_from_struct(data) + + self.assertEqual(data, bytes(tx)) + self.assertTrue(tx.verify_pow()) + self.assertTrue(tx.is_transaction) + self.assertFalse(tx.is_block) + + # These prints are here to test the methods. 
+ self.assertEqual( + str(tx), + 'TokenCreationTransaction(nonce=33518441, timestamp=1578090723, version=2, weight=20.645186, ' + 'hash=00000828d80dd4cd809c959139f7b4261df41152f4cce65a8777eb1c3a1f9702, ' + 'token_name=ToTheMoon, token_symbol=🚀, token_version=1)' + ) + self.assertEqual( + repr(tx), + 'TokenCreationTransaction(nonce=33518441, timestamp=1578090723, version=2, weight=20.645186, ' + 'hash=00000828d80dd4cd809c959139f7b4261df41152f4cce65a8777eb1c3a1f9702, ' + 'inputs=[TxInput(tx_id=000005551d7740fd7d3c0acc50b5677fdd844f1225985aa431e1712af2a2fd89, index=0)], ' + 'outputs=[TxOutput(token_data=0b0, value=9800), TxOutput(token_data=0b1, value=20000), ' + 'TxOutput(token_data=0b10000001, value=0b1), TxOutput(token_data=0b10000001, value=0b10)], ' + 'parents=[\'00001a72272f48339fcc5d5ec5deaf197855964b0eb912e8c6eefe00928b6cf6\', ' + '\'00001055641c20b71871ed2c5c7d4096a34f40888d79c25bce74421646e732dc\'])' + ) + + tx.nonce += 1 + tx.update_hash() + self.assertFalse(tx.verify_pow()) + + def test_token_creation_with_fee_header(self): + """Test TokenCreationTransaction with fee header""" + from hathorlib.token_creation_tx import TokenCreationTransaction, TokenVersion + + data = bytes.fromhex( + '0002010400000672c17c8fcf7277eece0b8cbe3f0efbdf6205e5e8554ccff5ca85ec8e49000069463044022070c5bfcd3b2f177' + 'c842de1937c8a089bec64ea2d27754056fb7d7882e731aad7022073b6811313a52f74a88cedbbb2d951ddd5c6d2bba97332eea74' + '2e020d7717f04210299138e77a8039c31a112941480231cccefc9e627fef5ff4a391e7a2689b319d40000000900001976a914ba6' + 'a16b0ab2c2bf132e1cfbdc01ef86a8c749a7188ac0000006401001976a914ba6a16b0ab2c2bf132e1cfbdc01ef86a8c749a7188a' + 'c0000000181001976a914ba6a16b0ab2c2bf132e1cfbdc01ef86a8c749a7188ac0000000281001976a914ba6a16b0ab2c2bf132e' + '1cfbdc01ef86a8c749a7188ac0209546f6b656e4e616d6503544b4e4031b96d6968b53e690472ad000000000011010000000001' + ) + + tx = TokenCreationTransaction.create_from_struct(data) + + # Verify the token version is FEE (2) + 
self.assertEqual(tx.token_version, TokenVersion.FEE) + + # Verify the transaction can be serialized and deserialized correctly + self.assertEqual(data, bytes(tx)) + + # Verify basic transaction properties + self.assertTrue(tx.is_transaction) + self.assertFalse(tx.is_block) + self.assertTrue(tx.has_fees()) + + # Verify the fee header contains the expected fee entry + fee_header = tx.get_fee_header() + self.assertEqual(len(fee_header.fees), 1) + self.assertEqual(fee_header.fees[0].token_index, 0) + self.assertEqual(fee_header.fees[0].amount, 1) + + self.assertEqual(len(fee_header.get_fees()), 1) + self.assertEqual(fee_header.get_fees()[0].amount, 1) + self.assertEqual(fee_header.get_fees()[0].token_uid, settings.HATHOR_TOKEN_UID) + + # Verify the string representation includes token_version=2 + str_repr = str(tx) + self.assertIn('token_version=2', str_repr) + self.assertIn('token_name=TokenName', str_repr) + self.assertIn('token_symbol=TKN', str_repr) + + def test_script_basics(self): + create_output_script(decode_address('HVZjvL1FJ23kH3buGNuttVRsRKq66WHUVZ')) + + def test_standard_tx(self): + data = bytes.fromhex('0001000102000001e0e88216036e4e52872ba60a96df7570c3e29cc30eda6dd92ea0fd' + '304c00006a4730450221009fa4798bb69f66035013063c13f1a970ec58111bcead277d' + '9c93e45c2b6885fe022012e039b26cc4a4cb0a8a5abb7deb7bb78610ed362bf422efa2' + '47db37c5a841e12102bc1213ea99ab55effcff760f94c09f8b1a0b7b990c01128d06b4' + 'a8c5c5f41f8400089f0800001976a91438fb3bc92b76819e9c19ef7c079d327c8fcd19' + '9288ac02de2d3800001976a9148d880c42ddcf78a2da5d06558f13515508720b4088ac' + '403518509c63f9195ecfd7d40200001ea9d6e1d31da6893fcec594dc3fa8b6819ae126' + '8c190f7a1441302226e2000007d1c5add7b9085037cfc591f1008dff4fe8a9158fd1a4' + '840a6dd5d4e4e600d2da8d') + + tx = tx_or_block_from_bytes(data) + self.assertTrue(tx.is_standard()) + + # Change the first output to have script size bigger than allowed + tx.outputs[0].script = b'x' * (settings.PUSHTX_MAX_OUTPUT_SCRIPT_SIZE + 1) + tx_bytes_big = 
bytes(tx) + tx2 = tx_or_block_from_bytes(tx_bytes_big) + self.assertFalse(tx2.is_standard()) + self.assertFalse(tx2.is_standard(std_max_output_script_size=settings.PUSHTX_MAX_OUTPUT_SCRIPT_SIZE + 1)) + self.assertTrue( + tx2.is_standard( + std_max_output_script_size=settings.PUSHTX_MAX_OUTPUT_SCRIPT_SIZE + 1, only_standard_script_type=False + ) + ) + + # Make first output non standard + tx.outputs[0].script = b'x' * settings.PUSHTX_MAX_OUTPUT_SCRIPT_SIZE + tx_bytes_non_standard = bytes(tx) + tx3 = tx_or_block_from_bytes(tx_bytes_non_standard) + self.assertFalse(tx3.is_standard()) + self.assertTrue(tx3.is_standard(only_standard_script_type=False)) + + def test_tx_with_nano_and_fee_headers(self): + """Test Transaction with NanoHeader and FeeHeader""" + from hathorlib.headers import FeeHeader, NanoHeader + + data = bytes.fromhex( + '0001010102a63cd61c1265d2ddcaf9e59072e999be92f1e8b3a9f80d3059667ebd07acff8200000b55' + '0310ce5d2405848b90497875979809205758c3be336d58fa4b358e7400006946304402201ce9d15038' + '2e74fb123fdb9b418372ab02f5e342eef77302711ba25bbad6fc2a022079552147f44d01fe5339e3e1' + '1748b5fb7bce1f45389daf8314d60a50c87b3a2e21036acf7120c9c95d917ab44ebf223f9c9fe202c3' + 'f61a963469ac99dadbb7f066450000006401001976a914ce852b6869b2e6a78341beaf68e301784696' + '605e88ac0000225f00001976a914cc62dd4e0d45b3c92768eb8d31d32ee203aa968088ac4034451999' + '38bb1f694ac1350000000000100000096571b0cae543f7b16d395b19b655a1266210de1892fd127c3' + '15aa04ff105046e6f6f700001000102010000006449ce852b6869b2e6a78341beaf68e30178469660' + '5e819040746946304402201ce9d150382e74fb123fdb9b418372ab02f5e342eef77302711ba25bbad6' + 'fc2a022079552147f44d01fe5339e3e11748b5fb7bce1f45389daf8314d60a50c87b3a2e21036acf71' + '20c9c95d917ab44ebf223f9c9fe202c3f61a963469ac99dadbb7f0664511010000000001' + ) + + tx = Transaction.create_from_struct(data) + + # Verify the transaction can be serialized and deserialized correctly + self.assertEqual(data, bytes(tx)) + + # Verify basic transaction properties + 
self.assertTrue(tx.is_transaction) + self.assertFalse(tx.is_block) + self.assertTrue(tx.has_fees()) + self.assertTrue(tx.is_nano_contract()) + + # Verify transaction structure + self.assertEqual(len(tx.inputs), 1) + self.assertEqual(len(tx.outputs), 2) + self.assertEqual(len(tx.tokens), 1) + self.assertEqual(len(tx.headers), 2) + + # Verify the headers are of correct types + nano_header = tx.get_nano_header() + fee_header = tx.get_fee_header() + self.assertIsInstance(nano_header, NanoHeader) + self.assertIsInstance(fee_header, FeeHeader) + + # Verify the fee header contains the expected fee entry + self.assertEqual(len(fee_header.fees), 1) + self.assertEqual(fee_header.fees[0].token_index, 0) + self.assertEqual(fee_header.fees[0].amount, 1) + + # Verify the nano header has expected method + self.assertEqual(nano_header.nc_method, 'noop') + + def test_tx_version_and_signal_bits(self): + from hathorlib.base_transaction import TxVersion + + # test invalid type + with self.assertRaises(AssertionError) as cm: + TxVersion('test') + + self.assertEqual(str(cm.exception), "Value 'test' must be an integer") + + # test one byte max value + with self.assertRaises(AssertionError) as cm: + TxVersion(0x100) + + self.assertEqual(str(cm.exception), 'Value 0x100 must not be larger than one byte') + + # test invalid version + with self.assertRaises(ValueError) as cm: + TxVersion(10) + + self.assertEqual(str(cm.exception), 'Invalid version: 10') + + # test get the correct class + version = TxVersion(0x00) + self.assertEqual(version.get_cls(), Block) + version = TxVersion(0x01) + self.assertEqual(version.get_cls(), Transaction) + + # test serialization doesn't mess up with signal_bits and version + data = bytes.fromhex('f00001ffffffe8b789180000001976a9147fd4ae0e4fb2d2854e76d359029d8078bb9' + '9649e88ac40350000000000005e0f84a9000000000000000000000000000000278a7e') + block = Block.create_from_struct(data) + block2 = block.clone() + + self.assertEqual(block.signal_bits, 
block2.signal_bits) + self.assertEqual(block.version, block2.version) diff --git a/hathorlib/tests/test_client.py b/hathorlib/tests/test_client.py new file mode 100644 index 000000000..4dd12c06e --- /dev/null +++ b/hathorlib/tests/test_client.py @@ -0,0 +1,183 @@ +""" +Copyright (c) Hathor Labs and its affiliates. + +This source code is licensed under the MIT license found in the +LICENSE file in the root directory of this source tree. +""" + +from contextlib import asynccontextmanager +from typing import AsyncIterator +from unittest import IsolatedAsyncioTestCase +from unittest.mock import Mock + +from hathorlib.client import HathorClient +from hathorlib.exceptions import PushTxFailed +from tests.test_util import AsyncMock + + +class ClientTestCase(IsolatedAsyncioTestCase): + async def asyncSetUp(self) -> None: + self.client = HathorClient(server_url='') + await self.client.start() + + async def test_push_block(self) -> None: + # Preparation + hex = ('000001ffffffe8b789180000001976a9147fd4ae0e4fb2d2854e76d359029d8078bb9' + '9649e88ac40350000000000005e0f84a9000000000000000000000000000000278a7e') + + data = bytes.fromhex(hex) + + class MockResponse: + def __init__(self): + self.status = 200 + + async def json(self): + return {"result": "success"} + + self.client._session = Mock() + self.client._session.post = AsyncMock(return_value=MockResponse()) + + # Execution + await self.client.push_tx_or_block(data) + + # Assertion + self.client._session.post.assert_called_once_with( + 'v1a/submit_block', + json={'hexdata': hex} + ) + + async def test_push_transaction(self) -> None: + # Preparation + hex = ('0001000102000001e0e88216036e4e52872ba60a96df7570c3e29cc30eda6dd92ea0fd' + '304c00006a4730450221009fa4798bb69f66035013063c13f1a970ec58111bcead277d' + '9c93e45c2b6885fe022012e039b26cc4a4cb0a8a5abb7deb7bb78610ed362bf422efa2' + '47db37c5a841e12102bc1213ea99ab55effcff760f94c09f8b1a0b7b990c01128d06b4' + 'a8c5c5f41f8400089f0800001976a91438fb3bc92b76819e9c19ef7c079d327c8fcd19' + 
'9288ac02de2d3800001976a9148d880c42ddcf78a2da5d06558f13515508720b4088ac' + '403518509c63f9195ecfd7d40200001ea9d6e1d31da6893fcec594dc3fa8b6819ae126' + '8c190f7a1441302226e2000007d1c5add7b9085037cfc591f1008dff4fe8a9158fd1a4' + '840a6dd5d4e4e600d2da8d') + + data = bytes.fromhex(hex) + + class MockResponse: + def __init__(self): + self.status = 200 + + async def json(self): + return {"result": "success"} + + self.client._session = Mock() + self.client._session.post = AsyncMock(return_value=MockResponse()) + + # Execution + await self.client.push_tx_or_block(data) + + # Assertion + self.client._session.post.assert_called_once_with( + 'v1a/push_tx', + json={'hex_tx': hex} + ) + + async def test_push_tx_or_block_error(self) -> None: + # Preparation + class MockResponse: + def __init__(self): + self.status = 500 + + async def text(self): + return "Test Response" + + async def post_mock(url, json): + return MockResponse() + + self.client._session = Mock() + self.client._session.post = post_mock + + # Execution + with self.assertRaises(PushTxFailed): + data = bytes.fromhex('000001ffffffe8b789180000001976a9147fd4ae0e4fb2d2854e76d359029d8078bb9' + '9649e88ac40350000000000005e0f84a9000000000000000000000000000000278a7e') + await self.client.push_tx_or_block(data) + + async def test_get_block_template(self) -> None: + # Preparation + class MockResponse: + def __init__(self): + self.status = 200 + + async def json(self): + return dict( + timestamp=12345, + parents=['01', '02', '03'], + weight=60, + outputs=[dict(value=6400)], + signal_bits=0b0101, + metadata=dict( + height=999 + ) + ) + + self.client._session = Mock() + self.client._session.get = AsyncMock(return_value=MockResponse()) + + # Execution + template = await self.client.get_block_template(address='my_address') + + # Assertion + expected_data = '05000100001900000000404e00000000000000003039030102030000000000000000000000000000000000' + expected_height = 999 + + self.assertEqual(template.data.hex(), expected_data) + 
self.assertEqual(template.height, expected_height) + + self.client._session.get.assert_called_once_with( + 'v1a/get_block_template', + params=dict(address='my_address') + ) + + async def test_version(self) -> None: + # Preparation + versions = [ + "1.2.3", + "1.2.3-rc.2", + "1.2.3-rc.2+build.2", + "1.2.3+build.2", + ] + + class MockResponse: + def __init__(self): + self.status = 200 + self.version = None + + async def json(self): + return {"version": self.version} + + mock_response = MockResponse() + self.client._session = Mock() + + @asynccontextmanager + async def get_mock(url: str) -> AsyncIterator[MockResponse]: + yield mock_response + + self.client._session.get = get_mock + + # Execution + for version in versions: + mock_response.version = version + result = await self.client.version() + + # Assertion + self.assertEqual(result.major, 1) + self.assertEqual(result.minor, 2) + self.assertEqual(result.patch, 3) + + if version.endswith('-rc.2+build.2'): + self.assertEqual(result.metadata, 'build.2') + self.assertEqual(result.prerelease, 'rc.2') + elif version.endswith('+build.2'): + self.assertEqual(result.metadata, 'build.2') + self.assertIsNone(result.prerelease) + elif version.endswith('-rc.2'): + self.assertIsNone(result.metadata) + self.assertEqual(result.prerelease, 'rc.2') diff --git a/hathorlib/tests/test_daa.py b/hathorlib/tests/test_daa.py new file mode 100644 index 000000000..8f455dc18 --- /dev/null +++ b/hathorlib/tests/test_daa.py @@ -0,0 +1,32 @@ +# Copyright (c) Hathor Labs and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +import unittest + +from hathorlib import Transaction +from hathorlib.daa import minimum_tx_weight + + +class HathorDAATestCase(unittest.TestCase): + def test_address_from_pubkey(self): + tx_bytes = bytes.fromhex( + '0001000102000000004b9f8c309247d44d8f242252516eff16cd4a4b6c7dfab2eea05a8a3101006a4730450' + '22100b5ccb3f4e2ebd5a16a6bdf14e0d392f0f02429f52c6260a14e79da1e1841fc58022048177cf5b0479f' + '37ff5c907a2e75cc1e0ba257608fb4156dad4d57143b60d7c52103548024000a2f7974de7abf7a391ec2552' + 'd653d460153bff2aaa2885b6612eb9c000041e300001976a914555ccdd5fbd8286b10afe5d5f49d4be6db25' + '113e88ac0000006400001976a91471fe2456c0dc242a022478d4928707c4720943a588ac40339ccd44c989a' + 'f6056491602000000009e59fbcbdaffc47b564b43af41f395a65b4eabb9e7667d6ad5ce2af6000002c69153' + '8af910fc12d475f5fd468bf4a50ecd89b08cdd1bf82a355444b1541e5618' + ) + tx = Transaction.create_from_struct(tx_bytes) + min_tx_weight = minimum_tx_weight(tx) + self.assertAlmostEqual(tx.weight, min_tx_weight, places=4) + + tx.parents = [] + min_tx_weight2 = minimum_tx_weight(tx) + self.assertAlmostEqual(min_tx_weight, min_tx_weight2) + + min_tx_weight3 = minimum_tx_weight(tx, fix_parents=False) + self.assertNotAlmostEqual(min_tx_weight, min_tx_weight3) diff --git a/hathorlib/tests/test_data_script.py b/hathorlib/tests/test_data_script.py new file mode 100644 index 000000000..15d642d13 --- /dev/null +++ b/hathorlib/tests/test_data_script.py @@ -0,0 +1,77 @@ +""" +Copyright (c) Hathor Labs and its affiliates. + +This source code is licensed under the MIT license found in the +LICENSE file in the root directory of this source tree. 
+""" + +import unittest + +from hathorlib.base_transaction import TxOutput, tx_or_block_from_bytes +from hathorlib.conf import HathorSettings +from hathorlib.scripts import DataScript + +settings = HathorSettings() + + +class HathorDataScriptTestCase(unittest.TestCase): + def test_script_data(self): + # Create NFT script data test + data = 'nft data test' + obj_data = DataScript(data) + human = obj_data.to_human_readable() + self.assertEqual(human['type'], 'Data') + self.assertEqual(human['data'], data) + + script = obj_data.get_script() + + parsed_obj = DataScript.parse_script(script) + self.assertEqual(parsed_obj.data, data) + + # Parse output script from real NFT + data = bytes.fromhex('00020103000023117762f80fad7c28eea89e793036e8e5855038eee4deea02c53d7513e700006a473045022' + '100eab17bbadcd5297695847c7e81a9d9c8b7995b9816a8cb2db4f68721eef22d44022043e8b9498a557cd2' + 'f8f4e957241cc78fee4daf0e149de5b9529048ee1ca0140e2103e42187c715fbdd129ef40bf9c6c9c63a6e0' + 'd72d478d121fa23c6078fa5049457000000010000060454455354ac0000012c01001976a91495b3e7b7559a' + '2b1ffa6c337fc6aeff74e963796588ac0000000281001976a914e7b6fadc93b5553781d73ac908134c0bbc5' + '14e6b88ac01065465737474740354535440200000218def416127d5800200d9741624399388d196e5e40959' + '5e65a1803764ee078f34ebb2bda63ff6a63a001a2603c9a5947233dedb1160e9468e95563e76945ae58d829' + '118e17e668dc900000053') + tx = tx_or_block_from_bytes(data) + nft_script = DataScript.parse_script(tx.outputs[0].script) + self.assertEqual(nft_script.data, 'TEST') + + self.assertFalse(tx.outputs[0].is_standard_script()) + self.assertTrue(tx.outputs[1].is_standard_script()) + + def test_tx_with_script_data(self): + # Parse output script from real test tx + # This tx has a data script output and it's not an NFT creation tx + data = bytes.fromhex('0001010202000041a564f1d090bbf23f7f370eee970ded2270aa2ff59e4632deb2a746d28500ff62bcebf5d' + 'f2827d98f6f3113c1226d555d5cafc77b914e4411698c3382e503006a47304502205a984dab561ff8f97a4f' + 
'c09d889f844de4fb66b32edc19e77bd84e58fa91bd61022100ef6bfa2e6c8b7f8eb41561b9b012b60fc41a3' + '9742cea74c4e0152be3ff98cbc421026f9b6b0b5d3badb218999d865b47ca70dc052920ca663d13eecf3176' + '2ed308ee003d11dacb7449dc7caf081223cfefb571e3ae4ec60da8eb74a201d516f3f3da01006a473045022' + '05a984dab561ff8f97a4fc09d889f844de4fb66b32edc19e77bd84e58fa91bd61022100ef6bfa2e6c8b7f8e' + 'b41561b9b012b60fc41a39742cea74c4e0152be3ff98cbc421026f9b6b0b5d3badb218999d865b47ca70dc0' + '52920ca663d13eecf31762ed308ee000000010000464468747470733a2f2f697066732e696f2f697066732f' + '516d586656704d6b52463475674254666a5361367a566f6e6d4b4a31466f6e43717434774d39354b5453463' + '756622fac0000000101001976a914aa8de9f415b80986c8827580d267ff963cca41e688ac40200000000000' + '00620bdc9702003d11dacb7449dc7caf081223cfefb571e3ae4ec60da8eb74a201d516f3f3da004aa11e1d1' + 'bc4d2c7b26e4f1b42b6da66b2add6bd562e8f1f59ec25b005e7a20000001a') + tx = tx_or_block_from_bytes(data) + self.assertTrue(tx.is_standard()) + + # Now we will add outputs until the max number of outputs + number_of_data_script_outputs = 1 + + while number_of_data_script_outputs < settings.MAX_DATA_SCRIPT_OUTPUTS: + new_output = TxOutput(1, tx.outputs[0].script, 0) + tx.outputs.append(new_output) + self.assertTrue(tx.is_standard()) + number_of_data_script_outputs += 1 + + # If we add one more, then it should become non standard + new_output = TxOutput(1, tx.outputs[0].script, 0) + tx.outputs.append(new_output) + self.assertFalse(tx.is_standard()) diff --git a/hathorlib/tests/test_deprecated.py b/hathorlib/tests/test_deprecated.py new file mode 100644 index 000000000..1c402fa70 --- /dev/null +++ b/hathorlib/tests/test_deprecated.py @@ -0,0 +1,40 @@ +import pytest + +from hathorlib.base_transaction import tx_or_block_from_bytes +from hathorlib.nanocontracts import DeprecatedNanoContract + + +@pytest.mark.parametrize( + ['hex_hash', 'hex_bytes'], + [ + # Without actions + ( + '000081fc23f06c2e0198e92d88bae373b9291281eaa1bde70a25895e8f395ebe', + 
'0004000000013cb032600bdf7db784800e4ea911b10676fa2f67591f82bb62628c234e7715950a696e697469616c697a650024001' + '976a9145f6557d55ebd9b9f17ac6d3dec9e62c3983e0f9d88ac000100000467d3162d21038125cdd1ba7942439d1cca8a622ce046' + 'ba94549375f8125b166a4c9f9545a9044730450221009ce1c5bd1f53a3123bbce623fb3ce54460814bb8fba7bee3a2d147a6d32e0' + 'd87022066857e268dd8e84272543ab3e5ba9e8389155be5f289116df07e76a1204f33e24030f50e7c7b57cb67d308ce0200000744' + '71704d198d5ebcfa31bc281d69a1d900c0c197444386d7bdf5db13c4000016cfc9ea80a9faebd599af3eb1a6c50308e2c74e003c4' + 'a502b9bddbf639400ff68b3', + ), + + # With actions + ( + '0000540ff09eff4811932fd954f7e070c37a36428d73c931af243eff43bb970b', + '00040000010000012c00001976a9145f6557d55ebd9b9f17ac6d3dec9e62c3983e0f9d88ac010000049be6b42e863d93c304519e6' + 'fa2e1731529b0ca3958b1a2f36c869fbd5c087769746864726177000021038125cdd1ba7942439d1cca8a622ce046ba94549375f8' + '125b166a4c9f9545a9044730450221009d12fc897c1a78658c8448f0c5b733f8f0019c079c51ab5d0f1804f58a9f128502202d16b' + '85ae939d2d8e023dec0f466aa3cf62c4839ed1a82d54f191ac516bf21e9403105b214e1b93767db0afb02000026ff3dd377bfab1e' + '643caa5bc4b51981cead3a53389610f8b3eb6df89c300000006f1d0156981bc023f2e99c1d3ee653418ff2a2da5a187d07d8a7dfe' + '26900fbcd09' + ) + ] +) +def test_deprecated_nano_contract(hex_bytes: str, hex_hash: str) -> None: + tx_bytes = bytes.fromhex(hex_bytes) + expected_tx_hash = bytes.fromhex(hex_hash) + + tx = tx_or_block_from_bytes(tx_bytes) + assert isinstance(tx, DeprecatedNanoContract) + assert tx.hash == expected_tx_hash + assert bytes(tx) == tx_bytes diff --git a/hathorlib/tests/test_nanocontract.py b/hathorlib/tests/test_nanocontract.py new file mode 100644 index 000000000..cfda8dca4 --- /dev/null +++ b/hathorlib/tests/test_nanocontract.py @@ -0,0 +1,75 @@ +# Copyright (c) Hathor Labs and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +import unittest + +from hathorlib import Transaction +from hathorlib.headers import NanoHeader, VertexHeaderId +from hathorlib.headers.nano_header import NanoHeaderAction +from hathorlib.nanocontracts.types import NCActionType + + +class NCNanoContractTestCase(unittest.TestCase): + def _get_nc(self) -> Transaction: + nc = Transaction() + nc.weight = 1 + nc.timestamp = 123456 + nano_header = NanoHeader( + tx=nc, + nc_seqnum=123, + nc_actions=[ + NanoHeaderAction( + type=NCActionType.DEPOSIT, + token_index=0, + amount=123, + ), + ], + nc_id=b'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx', + nc_method='initialize', + # ['string', 1] + nc_args_bytes=b'\x00\x06string\x00\x04\x00\x00\x00\x01', + nc_address=bytes.fromhex('280ff02e7049b7b15356a1d8108d2d8cda79b65ddf84403239'), + nc_script=bytes.fromhex('47304502206db7372dde8dfaac7364d6cd13517e3fc0d75fea09bc3c6a425e5607fcec3f93022100a' + 'aadfbdab62eaa65e2a6031ff04fccd283e9d653a80a858cb97dd101e5c689ae2102d6c0adc88c4e80' + '8f1aa1ee0fbce19f082613c0603eeb90764702f859b55c615b') + ) + nc.headers = [nano_header] + return nc + + def test_serialization(self) -> None: + nc = self._get_nc() + + nc_bytes = bytes(nc) + nc2 = Transaction.create_from_struct(nc_bytes) + self.assertEqual(nc_bytes, bytes(nc2)) + nano_header1 = nc.get_nano_header() + nano_header2 = nc2.get_nano_header() + assert isinstance(nano_header1, NanoHeader) + assert isinstance(nano_header2, NanoHeader) + + self.assertEqual(nano_header1.nc_seqnum, nano_header2.nc_seqnum) + self.assertEqual(nano_header1.nc_id, nano_header2.nc_id) + self.assertEqual(nano_header1.nc_method, nano_header2.nc_method) + self.assertEqual(nano_header1.nc_args_bytes, nano_header2.nc_args_bytes) + self.assertEqual(nano_header1.nc_address, nano_header2.nc_address) + self.assertEqual(nano_header1.nc_script, nano_header2.nc_script) + self.assertEqual(nano_header1.nc_actions, nano_header2.nc_actions) + + def test_serialization_skip_signature(self) -> None: + nc = self._get_nc() + nano_header = 
nc.get_nano_header() + sighash_bytes = nano_header.get_sighash_bytes() + deserialized, buf = NanoHeader.deserialize(Transaction(), VertexHeaderId.NANO_HEADER.value + sighash_bytes) + assert isinstance(nano_header, NanoHeader) + assert isinstance(deserialized, NanoHeader) + + assert len(buf) == 0 + assert deserialized.nc_seqnum == nano_header.nc_seqnum + assert deserialized.nc_id == nano_header.nc_id + assert deserialized.nc_method == nano_header.nc_method + assert deserialized.nc_args_bytes == nano_header.nc_args_bytes + assert deserialized.nc_actions == nano_header.nc_actions + assert deserialized.nc_address == nano_header.nc_address + assert deserialized.nc_script == b'' diff --git a/hathorlib/tests/test_nft.py b/hathorlib/tests/test_nft.py new file mode 100644 index 000000000..64270654c --- /dev/null +++ b/hathorlib/tests/test_nft.py @@ -0,0 +1,59 @@ +""" +Copyright (c) Hathor Labs and its affiliates. + +This source code is licensed under the MIT license found in the +LICENSE file in the root directory of this source tree. 
+""" + +import unittest + +from hathorlib.base_transaction import TxOutput, tx_or_block_from_bytes + + +class HathorNFTTestCase(unittest.TestCase): + def test_is_nft(self): + # Normal tx + data = bytes.fromhex('000100010100c994a3f1b46ddeb7134f65cb18b1b11ca7e19d59875a704b2bb2f79f6700b60000694630440' + '220066d379c43ee73c3704730a44d66a077fb2b1cee2b399cbcf87f34d2b2d84308022032e0a93662094c5d' + 'b4ed022708981717d06038924535257d181c2fa9f62a6ff9210310a7cd9cae728ddf8c7fef342f963b1cab1' + '97d97b28124ebbd0208d60d9f08780000000200001976a914e7c8133e7611a0ef57830f4321661ff9e5c42f' + '4188ac40200000218def41612cefe10200002d0403a9e39e8176b2e8ca6728f7c8393cea3403f4432c047e5' + 'b28cb0470009ed2ab70b799729bcdbaa8edc064bd78fb258ea23fe6688272acad587445ab0000000c') + tx = tx_or_block_from_bytes(data) + self.assertFalse(tx.is_nft_creation_standard()) + self.assertTrue(tx.is_standard()) + + # Create token tx + data2 = bytes.fromhex('0002010400b25b5385d9bbe80018a98884fdb2d63de3404c23e1b6695df34c103755b56900006a473045022' + '100b05b56237bd425ceeedc1bed82660239ae5cba5790e58980072a6d7a0b00ad500220729c456675abbee1' + '2b084ea841779ec26fe9d4ac4c3a6b2b004678ba697c66e72102c79cca85e51de1e3e85a232477d3be574aa' + '8d83c975321ac1993143d18401f3c0000006401001976a914bdd06a2ec4f180e5f3f5752671a771544c3936' + '4a88ac0000000181001976a914bdd06a2ec4f180e5f3f5752671a771544c39364a88ac0000000281001976a' + '914bdd06a2ec4f180e5f3f5752671a771544c39364a88ac0000138700001976a914439d757c69635d48ddb2' + 'a106a18ea5c1ce158d8488ac0106544f4b454e3104544b4e314032320a39bd7d606127f3ff02009ed2ab70b' + '799729bcdbaa8edc064bd78fb258ea23fe6688272acad587445ab00d9741624399388d196e5e409595e65a1' + '803764ee078f34ebb2bda63ff6a63a000104d8') + tx2 = tx_or_block_from_bytes(data2) + self.assertFalse(tx2.is_nft_creation_standard()) + self.assertTrue(tx2.is_standard()) + + # NFT tx + data3 = bytes.fromhex('00020103000023117762f80fad7c28eea89e793036e8e5855038eee4deea02c53d7513e700006a473045022' + 
'100eab17bbadcd5297695847c7e81a9d9c8b7995b9816a8cb2db4f68721eef22d44022043e8b9498a557cd2' + 'f8f4e957241cc78fee4daf0e149de5b9529048ee1ca0140e2103e42187c715fbdd129ef40bf9c6c9c63a6e0' + 'd72d478d121fa23c6078fa5049457000000010000060454455354ac0000012c01001976a91495b3e7b7559a' + '2b1ffa6c337fc6aeff74e963796588ac0000000281001976a914e7b6fadc93b5553781d73ac908134c0bbc5' + '14e6b88ac01065465737474740354535440200000218def416127d5800200d9741624399388d196e5e40959' + '5e65a1803764ee078f34ebb2bda63ff6a63a001a2603c9a5947233dedb1160e9468e95563e76945ae58d829' + '118e17e668dc900000053') + tx3 = tx_or_block_from_bytes(data3) + self.assertTrue(tx3.is_nft_creation_standard()) + self.assertTrue(tx3.is_standard()) + + # NFT custom tx with 2 data script outputs + tx4 = tx_or_block_from_bytes(data3) + # Add new data script output, creating a token creation tx with 2 script data outputs + # This should be rejected as a standard NFT + new_output = TxOutput(1, tx4.outputs[0].script, 0) + tx4.outputs = [tx4.outputs[0], new_output] + tx4.outputs[1:] + self.assertFalse(tx4.is_nft_creation_standard()) diff --git a/hathorlib/tests/test_on_chain_blueprint.py b/hathorlib/tests/test_on_chain_blueprint.py new file mode 100644 index 000000000..d70838b01 --- /dev/null +++ b/hathorlib/tests/test_on_chain_blueprint.py @@ -0,0 +1,38 @@ +# Copyright (c) Hathor Labs and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +import unittest + +from hathorlib.nanocontracts.on_chain_blueprint import Code, CodeKind, OnChainBlueprint + + +class OnChainBlueprintTestCase(unittest.TestCase): + def _get_ocb(self): + ocb = OnChainBlueprint() + ocb.weight = 1 + ocb.timestamp = 123456 + ocb.nc_pubkey = b'\x020\xc1K\xb8\xc4fO>\xb7\x96a\xdeN\x96\x92\xcd\x1c' \ + b'\xa8\xa3]\xfeZ\xf7}\x95\x99\xb0\x1cBE\xc8\x90' + ocb.nc_signature = b'0F\x02!\x00\x9c\xfey\xb1C\x9eAJ\x9eU~\xe3\xaf\xfcQ' \ + b'\xf6\xf0`g\x1b0\xb6\xca\x1b\xed\x83:N\xa0\x98\xd2' \ + b'\xdf\x02!\x00\xbe\xf85\xf6O`\xfed`Ip\xe2a\xc4\x03vv' \ + b'\xec\x94\ny?\xde\x90\xc3\x12\x9c\xd8\xdd\xd8\xe5\r' + code = Code(CodeKind.PYTHON_ZLIB, b'') + ocb.code = code + return ocb + + def test_serialization(self): + ocb = self._get_ocb() + + ocb_bytes = bytes(ocb) + ocb2 = OnChainBlueprint.create_from_struct(ocb_bytes) + self.assertEqual(ocb_bytes, bytes(ocb2)) + + self.assertEqual(ocb.weight, ocb2.weight) + self.assertEqual(ocb.timestamp, ocb2.timestamp) + self.assertEqual(ocb.nc_pubkey, ocb2.nc_pubkey) + self.assertEqual(ocb.nc_signature, ocb2.nc_signature) + self.assertEqual(ocb.code.kind, ocb2.code.kind) + self.assertEqual(ocb.code.data, ocb2.code.data) diff --git a/hathorlib/tests/test_util.py b/hathorlib/tests/test_util.py new file mode 100644 index 000000000..701bf4490 --- /dev/null +++ b/hathorlib/tests/test_util.py @@ -0,0 +1,23 @@ +# Copyright (c) Hathor Labs and its affiliates. +# +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. 
+ +import base64 +import unittest +from unittest.mock import MagicMock + +from hathorlib.utils import get_address_b58_from_public_key, get_public_key_from_bytes_compressed + + +class HathorUtilsTestCase(unittest.TestCase): + def test_address_from_pubkey(self): + pubkey_bytes = base64.b64decode("AzDv7fmrf98FfyThpHcHmuEM80vQCi04pnMohBvItqY8") + pubkey = get_public_key_from_bytes_compressed(pubkey_bytes) + address_b58 = get_address_b58_from_public_key(pubkey) + self.assertEqual('HURjYEBdMPtk7kVYBKyHCWc3HAvjrx3unT', address_b58) + + +class AsyncMock(MagicMock): + async def __call__(self, *args, **kwargs): + return super(AsyncMock, self).__call__(*args, **kwargs) diff --git a/poetry.lock b/poetry.lock index ed088cd0c..fda97f764 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.3.1 and should not be changed by hand. [[package]] name = "aiohappyeyeballs" @@ -265,65 +265,79 @@ files = [ [[package]] name = "cffi" -version = "1.16.0" +version = "1.17.1" description = "Foreign Function Interface for Python calling C code." 
optional = false python-versions = ">=3.8" groups = ["main"] -markers = "platform_python_implementation != \"PyPy\" or implementation_name == \"pypy\"" -files = [ - {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, - {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, - {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, - {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, - {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, - {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, - {file = 
"cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, - {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, - {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, - {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, - {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, - {file = 
"cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, - {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, - {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, - {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, - {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, - {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, +files = [ + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = 
"sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = 
"cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] [package.dependencies] @@ -376,14 +390,14 @@ test = ["pytest"] [[package]] name = "configargparse" -version = "1.5.5" +version = "1.7.1" description = "A drop-in replacement for argparse that allows options to also be set via config files and/or environment variables." 
optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.6" groups = ["main"] files = [ - {file = "ConfigArgParse-1.5.5-py3-none-any.whl", hash = "sha256:541360ddc1b15c517f95c0d02d1fca4591266628f3667acdc5d13dccc78884ca"}, - {file = "ConfigArgParse-1.5.5.tar.gz", hash = "sha256:363d80a6d35614bd446e2f2b1b216f3b33741d03ac6d0a92803306f40e555b58"}, + {file = "configargparse-1.7.1-py3-none-any.whl", hash = "sha256:8b586a31f9d873abd1ca527ffbe58863c99f36d896e2829779803125e83be4b6"}, + {file = "configargparse-1.7.1.tar.gz", hash = "sha256:79c2ddae836a1e5914b71d58e4b9adbd9f7779d4e6351a637b7d2d9b6c46d3d9"}, ] [package.extras] @@ -727,24 +741,26 @@ test = ["coverage", "mock (>=4)", "pytest (>=7)", "pytest-cov", "pytest-mock (>= [[package]] name = "hathorlib" -version = "0.12.0" +version = "0.14.0" description = "Hathor Network base objects library" optional = false -python-versions = "<4,>=3.9" +python-versions = ">=3.9,<4" groups = ["main"] -files = [ - {file = "hathorlib-0.12.0-py3-none-any.whl", hash = "sha256:f9868399519eac5efdec2c93e2fa122fcc1cf6b74fcd1efac918573d690caaa5"}, - {file = "hathorlib-0.12.0.tar.gz", hash = "sha256:09828665d081c57218b74427bf85c559e79b29c93c998fce4a49a3fd83d6c7a3"}, -] +files = [] +develop = false [package.dependencies] -base58 = ">=2.1.1,<2.2.0" -cryptography = ">=42.0.5,<42.1.0" -pycoin = ">=0.92,<0.93" +base58 = "~2.1.1" +cryptography = "~42.0.5" +pycoin = "~0.92" [package.extras] client = ["aiohttp (>=3.9.3,<3.10.0)", "structlog (>=22.3.0,<22.4.0)"] +[package.source] +type = "directory" +url = "hathorlib" + [[package]] name = "hyperlink" version = "21.0.0" @@ -958,6 +974,93 @@ traitlets = ">=5.3" docs = ["myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"] test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] +[[package]] +name = "librt" +version = "0.7.7" +description = "Mypyc runtime 
library" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +markers = "implementation_name == \"cpython\" and platform_python_implementation != \"PyPy\"" +files = [ + {file = "librt-0.7.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e4836c5645f40fbdc275e5670819bde5ab5f2e882290d304e3c6ddab1576a6d0"}, + {file = "librt-0.7.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ae8aec43117a645a31e5f60e9e3a0797492e747823b9bda6972d521b436b4e8"}, + {file = "librt-0.7.7-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:aea05f701ccd2a76b34f0daf47ca5068176ff553510b614770c90d76ac88df06"}, + {file = "librt-0.7.7-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7b16ccaeff0ed4355dfb76fe1ea7a5d6d03b5ad27f295f77ee0557bc20a72495"}, + {file = "librt-0.7.7-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c48c7e150c095d5e3cea7452347ba26094be905d6099d24f9319a8b475fcd3e0"}, + {file = "librt-0.7.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4dcee2f921a8632636d1c37f1bbdb8841d15666d119aa61e5399c5268e7ce02e"}, + {file = "librt-0.7.7-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:14ef0f4ac3728ffd85bfc58e2f2f48fb4ef4fa871876f13a73a7381d10a9f77c"}, + {file = "librt-0.7.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e4ab69fa37f8090f2d971a5d2bc606c7401170dbdae083c393d6cbf439cb45b8"}, + {file = "librt-0.7.7-cp310-cp310-win32.whl", hash = "sha256:4bf3cc46d553693382d2abf5f5bd493d71bb0f50a7c0beab18aa13a5545c8900"}, + {file = "librt-0.7.7-cp310-cp310-win_amd64.whl", hash = "sha256:f0c8fe5aeadd8a0e5b0598f8a6ee3533135ca50fd3f20f130f9d72baf5c6ac58"}, + {file = "librt-0.7.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a487b71fbf8a9edb72a8c7a456dda0184642d99cd007bc819c0b7ab93676a8ee"}, + {file = "librt-0.7.7-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:f4d4efb218264ecf0f8516196c9e2d1a0679d9fb3bb15df1155a35220062eba8"}, + {file = "librt-0.7.7-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:b8bb331aad734b059c4b450cd0a225652f16889e286b2345af5e2c3c625c3d85"}, + {file = "librt-0.7.7-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:467dbd7443bda08338fc8ad701ed38cef48194017554f4c798b0a237904b3f99"}, + {file = "librt-0.7.7-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50d1d1ee813d2d1a3baf2873634ba506b263032418d16287c92ec1cc9c1a00cb"}, + {file = "librt-0.7.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c7e5070cf3ec92d98f57574da0224f8c73faf1ddd6d8afa0b8c9f6e86997bc74"}, + {file = "librt-0.7.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:bdb9f3d865b2dafe7f9ad7f30ef563c80d0ddd2fdc8cc9b8e4f242f475e34d75"}, + {file = "librt-0.7.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8185c8497d45164e256376f9da5aed2bb26ff636c798c9dabe313b90e9f25b28"}, + {file = "librt-0.7.7-cp311-cp311-win32.whl", hash = "sha256:44d63ce643f34a903f09ff7ca355aae019a3730c7afd6a3c037d569beeb5d151"}, + {file = "librt-0.7.7-cp311-cp311-win_amd64.whl", hash = "sha256:7d13cc340b3b82134f8038a2bfe7137093693dcad8ba5773da18f95ad6b77a8a"}, + {file = "librt-0.7.7-cp311-cp311-win_arm64.whl", hash = "sha256:983de36b5a83fe9222f4f7dcd071f9b1ac6f3f17c0af0238dadfb8229588f890"}, + {file = "librt-0.7.7-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2a85a1fc4ed11ea0eb0a632459ce004a2d14afc085a50ae3463cd3dfe1ce43fc"}, + {file = "librt-0.7.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c87654e29a35938baead1c4559858f346f4a2a7588574a14d784f300ffba0efd"}, + {file = "librt-0.7.7-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:c9faaebb1c6212c20afd8043cd6ed9de0a47d77f91a6b5b48f4e46ed470703fe"}, + {file = 
"librt-0.7.7-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1908c3e5a5ef86b23391448b47759298f87f997c3bd153a770828f58c2bb4630"}, + {file = "librt-0.7.7-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dbc4900e95a98fc0729523be9d93a8fedebb026f32ed9ffc08acd82e3e181503"}, + {file = "librt-0.7.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a7ea4e1fbd253e5c68ea0fe63d08577f9d288a73f17d82f652ebc61fa48d878d"}, + {file = "librt-0.7.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:ef7699b7a5a244b1119f85c5bbc13f152cd38240cbb2baa19b769433bae98e50"}, + {file = "librt-0.7.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:955c62571de0b181d9e9e0a0303c8bc90d47670a5eff54cf71bf5da61d1899cf"}, + {file = "librt-0.7.7-cp312-cp312-win32.whl", hash = "sha256:1bcd79be209313b270b0e1a51c67ae1af28adad0e0c7e84c3ad4b5cb57aaa75b"}, + {file = "librt-0.7.7-cp312-cp312-win_amd64.whl", hash = "sha256:4353ee891a1834567e0302d4bd5e60f531912179578c36f3d0430f8c5e16b456"}, + {file = "librt-0.7.7-cp312-cp312-win_arm64.whl", hash = "sha256:a76f1d679beccccdf8c1958e732a1dfcd6e749f8821ee59d7bec009ac308c029"}, + {file = "librt-0.7.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8f4a0b0a3c86ba9193a8e23bb18f100d647bf192390ae195d84dfa0a10fb6244"}, + {file = "librt-0.7.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5335890fea9f9e6c4fdf8683061b9ccdcbe47c6dc03ab8e9b68c10acf78be78d"}, + {file = "librt-0.7.7-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:9b4346b1225be26def3ccc6c965751c74868f0578cbcba293c8ae9168483d811"}, + {file = "librt-0.7.7-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a10b8eebdaca6e9fdbaf88b5aefc0e324b763a5f40b1266532590d5afb268a4c"}, + {file = "librt-0.7.7-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:067be973d90d9e319e6eb4ee2a9b9307f0ecd648b8a9002fa237289a4a07a9e7"}, + {file = "librt-0.7.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:23d2299ed007812cccc1ecef018db7d922733382561230de1f3954db28433977"}, + {file = "librt-0.7.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:6b6f8ea465524aa4c7420c7cc4ca7d46fe00981de8debc67b1cc2e9957bb5b9d"}, + {file = "librt-0.7.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f8df32a99cc46eb0ee90afd9ada113ae2cafe7e8d673686cf03ec53e49635439"}, + {file = "librt-0.7.7-cp313-cp313-win32.whl", hash = "sha256:86f86b3b785487c7760247bcdac0b11aa8bf13245a13ed05206286135877564b"}, + {file = "librt-0.7.7-cp313-cp313-win_amd64.whl", hash = "sha256:4862cb2c702b1f905c0503b72d9d4daf65a7fdf5a9e84560e563471e57a56949"}, + {file = "librt-0.7.7-cp313-cp313-win_arm64.whl", hash = "sha256:0996c83b1cb43c00e8c87835a284f9057bc647abd42b5871e5f941d30010c832"}, + {file = "librt-0.7.7-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:23daa1ab0512bafdd677eb1bfc9611d8ffbe2e328895671e64cb34166bc1b8c8"}, + {file = "librt-0.7.7-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:558a9e5a6f3cc1e20b3168fb1dc802d0d8fa40731f6e9932dcc52bbcfbd37111"}, + {file = "librt-0.7.7-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:2567cb48dc03e5b246927ab35cbb343376e24501260a9b5e30b8e255dca0d1d2"}, + {file = "librt-0.7.7-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6066c638cdf85ff92fc6f932d2d73c93a0e03492cdfa8778e6d58c489a3d7259"}, + {file = "librt-0.7.7-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a609849aca463074c17de9cda173c276eb8fee9e441053529e7b9e249dc8b8ee"}, + {file = "librt-0.7.7-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:add4e0a000858fe9bb39ed55f31085506a5c38363e6eb4a1e5943a10c2bfc3d1"}, + {file = "librt-0.7.7-cp314-cp314-musllinux_1_2_i686.whl", hash = 
"sha256:a3bfe73a32bd0bdb9a87d586b05a23c0a1729205d79df66dee65bb2e40d671ba"}, + {file = "librt-0.7.7-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:0ecce0544d3db91a40f8b57ae26928c02130a997b540f908cefd4d279d6c5848"}, + {file = "librt-0.7.7-cp314-cp314-win32.whl", hash = "sha256:8f7a74cf3a80f0c3b0ec75b0c650b2f0a894a2cec57ef75f6f72c1e82cdac61d"}, + {file = "librt-0.7.7-cp314-cp314-win_amd64.whl", hash = "sha256:3d1fe2e8df3268dd6734dba33ededae72ad5c3a859b9577bc00b715759c5aaab"}, + {file = "librt-0.7.7-cp314-cp314-win_arm64.whl", hash = "sha256:2987cf827011907d3dfd109f1be0d61e173d68b1270107bb0e89f2fca7f2ed6b"}, + {file = "librt-0.7.7-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:8e92c8de62b40bfce91d5e12c6e8b15434da268979b1af1a6589463549d491e6"}, + {file = "librt-0.7.7-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:f683dcd49e2494a7535e30f779aa1ad6e3732a019d80abe1309ea91ccd3230e3"}, + {file = "librt-0.7.7-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:9b15e5d17812d4d629ff576699954f74e2cc24a02a4fc401882dd94f81daba45"}, + {file = "librt-0.7.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c084841b879c4d9b9fa34e5d5263994f21aea7fd9c6add29194dbb41a6210536"}, + {file = "librt-0.7.7-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:10c8fb9966f84737115513fecbaf257f9553d067a7dd45a69c2c7e5339e6a8dc"}, + {file = "librt-0.7.7-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:9b5fb1ecb2c35362eab2dbd354fd1efa5a8440d3e73a68be11921042a0edc0ff"}, + {file = "librt-0.7.7-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:d1454899909d63cc9199a89fcc4f81bdd9004aef577d4ffc022e600c412d57f3"}, + {file = "librt-0.7.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:7ef28f2e7a016b29792fe0a2dd04dec75725b32a1264e390c366103f834a9c3a"}, + {file = "librt-0.7.7-cp314-cp314t-win32.whl", hash = 
"sha256:5e419e0db70991b6ba037b70c1d5bbe92b20ddf82f31ad01d77a347ed9781398"}, + {file = "librt-0.7.7-cp314-cp314t-win_amd64.whl", hash = "sha256:d6b7d93657332c817b8d674ef6bf1ab7796b4f7ce05e420fd45bd258a72ac804"}, + {file = "librt-0.7.7-cp314-cp314t-win_arm64.whl", hash = "sha256:142c2cd91794b79fd0ce113bd658993b7ede0fe93057668c2f98a45ca00b7e91"}, + {file = "librt-0.7.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c8ffe3431d98cc043a14e88b21288b5ec7ee12cb01260e94385887f285ef9389"}, + {file = "librt-0.7.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e40d20ae1722d6b8ea6acf4597e789604649dcd9c295eb7361a28225bc2e9e12"}, + {file = "librt-0.7.7-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f2cb63c49bc96847c3bb8dca350970e4dcd19936f391cfdfd057dcb37c4fa97e"}, + {file = "librt-0.7.7-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8f2f8dcf5ab9f80fb970c6fd780b398efb2f50c1962485eb8d3ab07788595a48"}, + {file = "librt-0.7.7-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a1f5cc41a570269d1be7a676655875e3a53de4992a9fa38efb7983e97cf73d7c"}, + {file = "librt-0.7.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ff1fb2dfef035549565a4124998fadcb7a3d4957131ddf004a56edeb029626b3"}, + {file = "librt-0.7.7-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ab2a2a9cd7d044e1a11ca64a86ad3361d318176924bbe5152fbc69f99be20b8c"}, + {file = "librt-0.7.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ad3fc2d859a709baf9dd9607bb72f599b1cfb8a39eafd41307d0c3c4766763cb"}, + {file = "librt-0.7.7-cp39-cp39-win32.whl", hash = "sha256:f83c971eb9d2358b6a18da51dc0ae00556ac7c73104dde16e9e14c15aaf685ca"}, + {file = "librt-0.7.7-cp39-cp39-win_amd64.whl", hash = "sha256:264720fc288c86039c091a4ad63419a5d7cabbf1c1c9933336a957ed2483e570"}, + {file = "librt-0.7.7.tar.gz", hash = "sha256:81d957b069fed1890953c3b9c3895c7689960f233eea9a1d9607f71ce7f00b2c"}, +] + 
[[package]] name = "matplotlib-inline" version = "0.1.6" @@ -999,132 +1102,218 @@ files = [ [[package]] name = "multidict" -version = "6.0.4" +version = "6.7.0" description = "multidict implementation" optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, - {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, - {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", 
hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"}, - {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"}, - {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"}, - {file = 
"multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"}, - {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"}, - {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"}, - {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"}, - {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"}, - {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"}, - {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"}, - {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"}, - {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"}, - {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"}, - {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, + {file = "multidict-6.7.0-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:9f474ad5acda359c8758c8accc22032c6abe6dc87a8be2440d097785e27a9349"}, + {file = "multidict-6.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4b7a9db5a870f780220e931d0002bbfd88fb53aceb6293251e2c839415c1b20e"}, + {file = "multidict-6.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:03ca744319864e92721195fa28c7a3b2bc7b686246b35e4078c1e4d0eb5466d3"}, + {file = "multidict-6.7.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f0e77e3c0008bc9316e662624535b88d360c3a5d3f81e15cf12c139a75250046"}, + {file = "multidict-6.7.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:08325c9e5367aa379a3496aa9a022fe8837ff22e00b94db256d3a1378c76ab32"}, + {file = "multidict-6.7.0-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e2862408c99f84aa571ab462d25236ef9cb12a602ea959ba9c9009a54902fc73"}, + {file = "multidict-6.7.0-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4d72a9a2d885f5c208b0cb91ff2ed43636bb7e345ec839ff64708e04f69a13cc"}, + {file = "multidict-6.7.0-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:478cc36476687bac1514d651cbbaa94b86b0732fb6855c60c673794c7dd2da62"}, + {file = "multidict-6.7.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6843b28b0364dc605f21481c90fadb5f60d9123b442eb8a726bb74feef588a84"}, + {file = "multidict-6.7.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:23bfeee5316266e5ee2d625df2d2c602b829435fc3a235c2ba2131495706e4a0"}, + {file = "multidict-6.7.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:680878b9f3d45c31e1f730eef731f9b0bc1da456155688c6745ee84eb818e90e"}, + {file = "multidict-6.7.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:eb866162ef2f45063acc7a53a88ef6fe8bf121d45c30ea3c9cd87ce7e191a8d4"}, + {file = 
"multidict-6.7.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:df0e3bf7993bdbeca5ac25aa859cf40d39019e015c9c91809ba7093967f7a648"}, + {file = "multidict-6.7.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:661709cdcd919a2ece2234f9bae7174e5220c80b034585d7d8a755632d3e2111"}, + {file = "multidict-6.7.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:096f52730c3fb8ed419db2d44391932b63891b2c5ed14850a7e215c0ba9ade36"}, + {file = "multidict-6.7.0-cp310-cp310-win32.whl", hash = "sha256:afa8a2978ec65d2336305550535c9c4ff50ee527914328c8677b3973ade52b85"}, + {file = "multidict-6.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:b15b3afff74f707b9275d5ba6a91ae8f6429c3ffb29bbfd216b0b375a56f13d7"}, + {file = "multidict-6.7.0-cp310-cp310-win_arm64.whl", hash = "sha256:4b73189894398d59131a66ff157837b1fafea9974be486d036bb3d32331fdbf0"}, + {file = "multidict-6.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4d409aa42a94c0b3fa617708ef5276dfe81012ba6753a0370fcc9d0195d0a1fc"}, + {file = "multidict-6.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14c9e076eede3b54c636f8ce1c9c252b5f057c62131211f0ceeec273810c9721"}, + {file = "multidict-6.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c09703000a9d0fa3c3404b27041e574cc7f4df4c6563873246d0e11812a94b6"}, + {file = "multidict-6.7.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:a265acbb7bb33a3a2d626afbe756371dce0279e7b17f4f4eda406459c2b5ff1c"}, + {file = "multidict-6.7.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:51cb455de290ae462593e5b1cb1118c5c22ea7f0d3620d9940bf695cea5a4bd7"}, + {file = "multidict-6.7.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:db99677b4457c7a5c5a949353e125ba72d62b35f74e26da141530fbb012218a7"}, + {file = "multidict-6.7.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:f470f68adc395e0183b92a2f4689264d1ea4b40504a24d9882c27375e6662bb9"}, + {file = "multidict-6.7.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0db4956f82723cc1c270de9c6e799b4c341d327762ec78ef82bb962f79cc07d8"}, + {file = "multidict-6.7.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3e56d780c238f9e1ae66a22d2adf8d16f485381878250db8d496623cd38b22bd"}, + {file = "multidict-6.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9d14baca2ee12c1a64740d4531356ba50b82543017f3ad6de0deb943c5979abb"}, + {file = "multidict-6.7.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:295a92a76188917c7f99cda95858c822f9e4aae5824246bba9b6b44004ddd0a6"}, + {file = "multidict-6.7.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:39f1719f57adbb767ef592a50ae5ebb794220d1188f9ca93de471336401c34d2"}, + {file = "multidict-6.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:0a13fb8e748dfc94749f622de065dd5c1def7e0d2216dba72b1d8069a389c6ff"}, + {file = "multidict-6.7.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e3aa16de190d29a0ea1b48253c57d99a68492c8dd8948638073ab9e74dc9410b"}, + {file = "multidict-6.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a048ce45dcdaaf1defb76b2e684f997fb5abf74437b6cb7b22ddad934a964e34"}, + {file = "multidict-6.7.0-cp311-cp311-win32.whl", hash = "sha256:a90af66facec4cebe4181b9e62a68be65e45ac9b52b67de9eec118701856e7ff"}, + {file = "multidict-6.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:95b5ffa4349df2887518bb839409bcf22caa72d82beec453216802f475b23c81"}, + {file = "multidict-6.7.0-cp311-cp311-win_arm64.whl", hash = "sha256:329aa225b085b6f004a4955271a7ba9f1087e39dcb7e65f6284a988264a63912"}, + {file = "multidict-6.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8a3862568a36d26e650a19bb5cbbba14b71789032aebc0423f8cc5f150730184"}, + {file = "multidict-6.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:960c60b5849b9b4f9dcc9bea6e3626143c252c74113df2c1540aebce70209b45"}, + {file = "multidict-6.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2049be98fb57a31b4ccf870bf377af2504d4ae35646a19037ec271e4c07998aa"}, + {file = "multidict-6.7.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0934f3843a1860dd465d38895c17fce1f1cb37295149ab05cd1b9a03afacb2a7"}, + {file = "multidict-6.7.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b3e34f3a1b8131ba06f1a73adab24f30934d148afcd5f5de9a73565a4404384e"}, + {file = "multidict-6.7.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:efbb54e98446892590dc2458c19c10344ee9a883a79b5cec4bc34d6656e8d546"}, + {file = "multidict-6.7.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a35c5fc61d4f51eb045061e7967cfe3123d622cd500e8868e7c0c592a09fedc4"}, + {file = "multidict-6.7.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29fe6740ebccba4175af1b9b87bf553e9c15cd5868ee967e010efcf94e4fd0f1"}, + {file = "multidict-6.7.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:123e2a72e20537add2f33a79e605f6191fba2afda4cbb876e35c1a7074298a7d"}, + {file = "multidict-6.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b284e319754366c1aee2267a2036248b24eeb17ecd5dc16022095e747f2f4304"}, + {file = "multidict-6.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:803d685de7be4303b5a657b76e2f6d1240e7e0a8aa2968ad5811fa2285553a12"}, + {file = "multidict-6.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c04a328260dfd5db8c39538f999f02779012268f54614902d0afc775d44e0a62"}, + {file = "multidict-6.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8a19cdb57cd3df4cd865849d93ee14920fb97224300c88501f16ecfa2604b4e0"}, + {file = 
"multidict-6.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9b2fd74c52accced7e75de26023b7dccee62511a600e62311b918ec5c168fc2a"}, + {file = "multidict-6.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3e8bfdd0e487acf992407a140d2589fe598238eaeffa3da8448d63a63cd363f8"}, + {file = "multidict-6.7.0-cp312-cp312-win32.whl", hash = "sha256:dd32a49400a2c3d52088e120ee00c1e3576cbff7e10b98467962c74fdb762ed4"}, + {file = "multidict-6.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:92abb658ef2d7ef22ac9f8bb88e8b6c3e571671534e029359b6d9e845923eb1b"}, + {file = "multidict-6.7.0-cp312-cp312-win_arm64.whl", hash = "sha256:490dab541a6a642ce1a9d61a4781656b346a55c13038f0b1244653828e3a83ec"}, + {file = "multidict-6.7.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bee7c0588aa0076ce77c0ea5d19a68d76ad81fcd9fe8501003b9a24f9d4000f6"}, + {file = "multidict-6.7.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7ef6b61cad77091056ce0e7ce69814ef72afacb150b7ac6a3e9470def2198159"}, + {file = "multidict-6.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9c0359b1ec12b1d6849c59f9d319610b7f20ef990a6d454ab151aa0e3b9f78ca"}, + {file = "multidict-6.7.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cd240939f71c64bd658f186330603aac1a9a81bf6273f523fca63673cb7378a8"}, + {file = "multidict-6.7.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a60a4d75718a5efa473ebd5ab685786ba0c67b8381f781d1be14da49f1a2dc60"}, + {file = "multidict-6.7.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:53a42d364f323275126aff81fb67c5ca1b7a04fda0546245730a55c8c5f24bc4"}, + {file = "multidict-6.7.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3b29b980d0ddbecb736735ee5bef69bb2ddca56eff603c86f3f29a1128299b4f"}, + {file = 
"multidict-6.7.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f8a93b1c0ed2d04b97a5e9336fd2d33371b9a6e29ab7dd6503d63407c20ffbaf"}, + {file = "multidict-6.7.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ff96e8815eecacc6645da76c413eb3b3d34cfca256c70b16b286a687d013c32"}, + {file = "multidict-6.7.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7516c579652f6a6be0e266aec0acd0db80829ca305c3d771ed898538804c2036"}, + {file = "multidict-6.7.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:040f393368e63fb0f3330e70c26bfd336656bed925e5cbe17c9da839a6ab13ec"}, + {file = "multidict-6.7.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b3bc26a951007b1057a1c543af845f1c7e3e71cc240ed1ace7bf4484aa99196e"}, + {file = "multidict-6.7.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:7b022717c748dd1992a83e219587aabe45980d88969f01b316e78683e6285f64"}, + {file = "multidict-6.7.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:9600082733859f00d79dee64effc7aef1beb26adb297416a4ad2116fd61374bd"}, + {file = "multidict-6.7.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:94218fcec4d72bc61df51c198d098ce2b378e0ccbac41ddbed5ef44092913288"}, + {file = "multidict-6.7.0-cp313-cp313-win32.whl", hash = "sha256:a37bd74c3fa9d00be2d7b8eca074dc56bd8077ddd2917a839bd989612671ed17"}, + {file = "multidict-6.7.0-cp313-cp313-win_amd64.whl", hash = "sha256:30d193c6cc6d559db42b6bcec8a5d395d34d60c9877a0b71ecd7c204fcf15390"}, + {file = "multidict-6.7.0-cp313-cp313-win_arm64.whl", hash = "sha256:ea3334cabe4d41b7ccd01e4d349828678794edbc2d3ae97fc162a3312095092e"}, + {file = "multidict-6.7.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:ad9ce259f50abd98a1ca0aa6e490b58c316a0fce0617f609723e40804add2c00"}, + {file = "multidict-6.7.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07f5594ac6d084cbb5de2df218d78baf55ef150b91f0ff8a21cc7a2e3a5a58eb"}, + {file = 
"multidict-6.7.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0591b48acf279821a579282444814a2d8d0af624ae0bc600aa4d1b920b6e924b"}, + {file = "multidict-6.7.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:749a72584761531d2b9467cfbdfd29487ee21124c304c4b6cb760d8777b27f9c"}, + {file = "multidict-6.7.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b4c3d199f953acd5b446bf7c0de1fe25d94e09e79086f8dc2f48a11a129cdf1"}, + {file = "multidict-6.7.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:9fb0211dfc3b51efea2f349ec92c114d7754dd62c01f81c3e32b765b70c45c9b"}, + {file = "multidict-6.7.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a027ec240fe73a8d6281872690b988eed307cd7d91b23998ff35ff577ca688b5"}, + {file = "multidict-6.7.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1d964afecdf3a8288789df2f5751dc0a8261138c3768d9af117ed384e538fad"}, + {file = "multidict-6.7.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:caf53b15b1b7df9fbd0709aa01409000a2b4dd03a5f6f5cc548183c7c8f8b63c"}, + {file = "multidict-6.7.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:654030da3197d927f05a536a66186070e98765aa5142794c9904555d3a9d8fb5"}, + {file = "multidict-6.7.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:2090d3718829d1e484706a2f525e50c892237b2bf9b17a79b059cb98cddc2f10"}, + {file = "multidict-6.7.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2d2cfeec3f6f45651b3d408c4acec0ebf3daa9bc8a112a084206f5db5d05b754"}, + {file = "multidict-6.7.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:4ef089f985b8c194d341eb2c24ae6e7408c9a0e2e5658699c92f497437d88c3c"}, + {file = "multidict-6.7.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = 
"sha256:e93a0617cd16998784bf4414c7e40f17a35d2350e5c6f0bd900d3a8e02bd3762"}, + {file = "multidict-6.7.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f0feece2ef8ebc42ed9e2e8c78fc4aa3cf455733b507c09ef7406364c94376c6"}, + {file = "multidict-6.7.0-cp313-cp313t-win32.whl", hash = "sha256:19a1d55338ec1be74ef62440ca9e04a2f001a04d0cc49a4983dc320ff0f3212d"}, + {file = "multidict-6.7.0-cp313-cp313t-win_amd64.whl", hash = "sha256:3da4fb467498df97e986af166b12d01f05d2e04f978a9c1c680ea1988e0bc4b6"}, + {file = "multidict-6.7.0-cp313-cp313t-win_arm64.whl", hash = "sha256:b4121773c49a0776461f4a904cdf6264c88e42218aaa8407e803ca8025872792"}, + {file = "multidict-6.7.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3bab1e4aff7adaa34410f93b1f8e57c4b36b9af0426a76003f441ee1d3c7e842"}, + {file = "multidict-6.7.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:b8512bac933afc3e45fb2b18da8e59b78d4f408399a960339598374d4ae3b56b"}, + {file = "multidict-6.7.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:79dcf9e477bc65414ebfea98ffd013cb39552b5ecd62908752e0e413d6d06e38"}, + {file = "multidict-6.7.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:31bae522710064b5cbeddaf2e9f32b1abab70ac6ac91d42572502299e9953128"}, + {file = "multidict-6.7.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4a0df7ff02397bb63e2fd22af2c87dfa39e8c7f12947bc524dbdc528282c7e34"}, + {file = "multidict-6.7.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7a0222514e8e4c514660e182d5156a415c13ef0aabbd71682fc714e327b95e99"}, + {file = "multidict-6.7.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2397ab4daaf2698eb51a76721e98db21ce4f52339e535725de03ea962b5a3202"}, + {file = "multidict-6.7.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:8891681594162635948a636c9fe0ff21746aeb3dd5463f6e25d9bea3a8a39ca1"}, + {file = "multidict-6.7.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18706cc31dbf402a7945916dd5cddf160251b6dab8a2c5f3d6d5a55949f676b3"}, + {file = "multidict-6.7.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:f844a1bbf1d207dd311a56f383f7eda2d0e134921d45751842d8235e7778965d"}, + {file = "multidict-6.7.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:d4393e3581e84e5645506923816b9cc81f5609a778c7e7534054091acc64d1c6"}, + {file = "multidict-6.7.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:fbd18dc82d7bf274b37aa48d664534330af744e03bccf696d6f4c6042e7d19e7"}, + {file = "multidict-6.7.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:b6234e14f9314731ec45c42fc4554b88133ad53a09092cc48a88e771c125dadb"}, + {file = "multidict-6.7.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:08d4379f9744d8f78d98c8673c06e202ffa88296f009c71bbafe8a6bf847d01f"}, + {file = "multidict-6.7.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:9fe04da3f79387f450fd0061d4dd2e45a72749d31bf634aecc9e27f24fdc4b3f"}, + {file = "multidict-6.7.0-cp314-cp314-win32.whl", hash = "sha256:fbafe31d191dfa7c4c51f7a6149c9fb7e914dcf9ffead27dcfd9f1ae382b3885"}, + {file = "multidict-6.7.0-cp314-cp314-win_amd64.whl", hash = "sha256:2f67396ec0310764b9222a1728ced1ab638f61aadc6226f17a71dd9324f9a99c"}, + {file = "multidict-6.7.0-cp314-cp314-win_arm64.whl", hash = "sha256:ba672b26069957ee369cfa7fc180dde1fc6f176eaf1e6beaf61fbebbd3d9c000"}, + {file = "multidict-6.7.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:c1dcc7524066fa918c6a27d61444d4ee7900ec635779058571f70d042d86ed63"}, + {file = "multidict-6.7.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:27e0b36c2d388dc7b6ced3406671b401e84ad7eb0656b8f3a2f46ed0ce483718"}, + {file = "multidict-6.7.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = 
"sha256:2a7baa46a22e77f0988e3b23d4ede5513ebec1929e34ee9495be535662c0dfe2"}, + {file = "multidict-6.7.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7bf77f54997a9166a2f5675d1201520586439424c2511723a7312bdb4bcc034e"}, + {file = "multidict-6.7.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e011555abada53f1578d63389610ac8a5400fc70ce71156b0aa30d326f1a5064"}, + {file = "multidict-6.7.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:28b37063541b897fd6a318007373930a75ca6d6ac7c940dbe14731ffdd8d498e"}, + {file = "multidict-6.7.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:05047ada7a2fde2631a0ed706f1fd68b169a681dfe5e4cf0f8e4cb6618bbc2cd"}, + {file = "multidict-6.7.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:716133f7d1d946a4e1b91b1756b23c088881e70ff180c24e864c26192ad7534a"}, + {file = "multidict-6.7.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d1bed1b467ef657f2a0ae62844a607909ef1c6889562de5e1d505f74457d0b96"}, + {file = "multidict-6.7.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:ca43bdfa5d37bd6aee89d85e1d0831fb86e25541be7e9d376ead1b28974f8e5e"}, + {file = "multidict-6.7.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:44b546bd3eb645fd26fb949e43c02a25a2e632e2ca21a35e2e132c8105dc8599"}, + {file = "multidict-6.7.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:a6ef16328011d3f468e7ebc326f24c1445f001ca1dec335b2f8e66bed3006394"}, + {file = "multidict-6.7.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:5aa873cbc8e593d361ae65c68f85faadd755c3295ea2c12040ee146802f23b38"}, + {file = "multidict-6.7.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:3d7b6ccce016e29df4b7ca819659f516f0bc7a4b3efa3bb2012ba06431b044f9"}, + {file = 
"multidict-6.7.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:171b73bd4ee683d307599b66793ac80981b06f069b62eea1c9e29c9241aa66b0"}, + {file = "multidict-6.7.0-cp314-cp314t-win32.whl", hash = "sha256:b2d7f80c4e1fd010b07cb26820aae86b7e73b681ee4889684fb8d2d4537aab13"}, + {file = "multidict-6.7.0-cp314-cp314t-win_amd64.whl", hash = "sha256:09929cab6fcb68122776d575e03c6cc64ee0b8fca48d17e135474b042ce515cd"}, + {file = "multidict-6.7.0-cp314-cp314t-win_arm64.whl", hash = "sha256:cc41db090ed742f32bd2d2c721861725e6109681eddf835d0a82bd3a5c382827"}, + {file = "multidict-6.7.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:363eb68a0a59bd2303216d2346e6c441ba10d36d1f9969fcb6f1ba700de7bb5c"}, + {file = "multidict-6.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d874eb056410ca05fed180b6642e680373688efafc7f077b2a2f61811e873a40"}, + {file = "multidict-6.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8b55d5497b51afdfde55925e04a022f1de14d4f4f25cdfd4f5d9b0aa96166851"}, + {file = "multidict-6.7.0-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f8e5c0031b90ca9ce555e2e8fd5c3b02a25f14989cbc310701823832c99eb687"}, + {file = "multidict-6.7.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9cf41880c991716f3c7cec48e2f19ae4045fc9db5fc9cff27347ada24d710bb5"}, + {file = "multidict-6.7.0-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8cfc12a8630a29d601f48d47787bd7eb730e475e83edb5d6c5084317463373eb"}, + {file = "multidict-6.7.0-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3996b50c3237c4aec17459217c1e7bbdead9a22a0fcd3c365564fbd16439dde6"}, + {file = "multidict-6.7.0-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:7f5170993a0dd3ab871c74f45c0a21a4e2c37a2f2b01b5f722a2ad9c6650469e"}, + {file = 
"multidict-6.7.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ec81878ddf0e98817def1e77d4f50dae5ef5b0e4fe796fae3bd674304172416e"}, + {file = "multidict-6.7.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9281bf5b34f59afbc6b1e477a372e9526b66ca446f4bf62592839c195a718b32"}, + {file = "multidict-6.7.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:68af405971779d8b37198726f2b6fe3955db846fee42db7a4286fc542203934c"}, + {file = "multidict-6.7.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3ba3ef510467abb0667421a286dc906e30eb08569365f5cdb131d7aff7c2dd84"}, + {file = "multidict-6.7.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:b61189b29081a20c7e4e0b49b44d5d44bb0dc92be3c6d06a11cc043f81bf9329"}, + {file = "multidict-6.7.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:fb287618b9c7aa3bf8d825f02d9201b2f13078a5ed3b293c8f4d953917d84d5e"}, + {file = "multidict-6.7.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:521f33e377ff64b96c4c556b81c55d0cfffb96a11c194fd0c3f1e56f3d8dd5a4"}, + {file = "multidict-6.7.0-cp39-cp39-win32.whl", hash = "sha256:ce8fdc2dca699f8dbf055a61d73eaa10482569ad20ee3c36ef9641f69afa8c91"}, + {file = "multidict-6.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:7e73299c99939f089dd9b2120a04a516b95cdf8c1cd2b18c53ebf0de80b1f18f"}, + {file = "multidict-6.7.0-cp39-cp39-win_arm64.whl", hash = "sha256:6bdce131e14b04fd34a809b6380dbfd826065c3e2fe8a50dbae659fa0c390546"}, + {file = "multidict-6.7.0-py3-none-any.whl", hash = "sha256:394fc5c42a333c9ffc3e421a4c85e08580d990e08b99f6bf35b4132114c5dcb3"}, + {file = "multidict-6.7.0.tar.gz", hash = "sha256:c6e99d9a65ca282e578dfea819cfa9c0a62b2499d8677392e09feaf305e9e6f5"}, ] [[package]] name = "mypy" -version = "1.10.1" +version = "1.19.1" description = "Optional static typing for Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" groups = ["dev"] markers = "implementation_name == \"cpython\"" files = [ - {file = 
"mypy-1.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e36f229acfe250dc660790840916eb49726c928e8ce10fbdf90715090fe4ae02"}, - {file = "mypy-1.10.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:51a46974340baaa4145363b9e051812a2446cf583dfaeba124af966fa44593f7"}, - {file = "mypy-1.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:901c89c2d67bba57aaaca91ccdb659aa3a312de67f23b9dfb059727cce2e2e0a"}, - {file = "mypy-1.10.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0cd62192a4a32b77ceb31272d9e74d23cd88c8060c34d1d3622db3267679a5d9"}, - {file = "mypy-1.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:a2cbc68cb9e943ac0814c13e2452d2046c2f2b23ff0278e26599224cf164e78d"}, - {file = "mypy-1.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bd6f629b67bb43dc0d9211ee98b96d8dabc97b1ad38b9b25f5e4c4d7569a0c6a"}, - {file = "mypy-1.10.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a1bbb3a6f5ff319d2b9d40b4080d46cd639abe3516d5a62c070cf0114a457d84"}, - {file = "mypy-1.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8edd4e9bbbc9d7b79502eb9592cab808585516ae1bcc1446eb9122656c6066f"}, - {file = "mypy-1.10.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6166a88b15f1759f94a46fa474c7b1b05d134b1b61fca627dd7335454cc9aa6b"}, - {file = "mypy-1.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:5bb9cd11c01c8606a9d0b83ffa91d0b236a0e91bc4126d9ba9ce62906ada868e"}, - {file = "mypy-1.10.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d8681909f7b44d0b7b86e653ca152d6dff0eb5eb41694e163c6092124f8246d7"}, - {file = "mypy-1.10.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:378c03f53f10bbdd55ca94e46ec3ba255279706a6aacaecac52ad248f98205d3"}, - {file = "mypy-1.10.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bacf8f3a3d7d849f40ca6caea5c055122efe70e81480c8328ad29c55c69e93e"}, - {file = "mypy-1.10.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:701b5f71413f1e9855566a34d6e9d12624e9e0a8818a5704d74d6b0402e66c04"}, - {file = "mypy-1.10.1-cp312-cp312-win_amd64.whl", hash = "sha256:3c4c2992f6ea46ff7fce0072642cfb62af7a2484efe69017ed8b095f7b39ef31"}, - {file = "mypy-1.10.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:604282c886497645ffb87b8f35a57ec773a4a2721161e709a4422c1636ddde5c"}, - {file = "mypy-1.10.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37fd87cab83f09842653f08de066ee68f1182b9b5282e4634cdb4b407266bade"}, - {file = "mypy-1.10.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8addf6313777dbb92e9564c5d32ec122bf2c6c39d683ea64de6a1fd98b90fe37"}, - {file = "mypy-1.10.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5cc3ca0a244eb9a5249c7c583ad9a7e881aa5d7b73c35652296ddcdb33b2b9c7"}, - {file = "mypy-1.10.1-cp38-cp38-win_amd64.whl", hash = "sha256:1b3a2ffce52cc4dbaeee4df762f20a2905aa171ef157b82192f2e2f368eec05d"}, - {file = "mypy-1.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fe85ed6836165d52ae8b88f99527d3d1b2362e0cb90b005409b8bed90e9059b3"}, - {file = "mypy-1.10.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c2ae450d60d7d020d67ab440c6e3fae375809988119817214440033f26ddf7bf"}, - {file = "mypy-1.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6be84c06e6abd72f960ba9a71561c14137a583093ffcf9bbfaf5e613d63fa531"}, - {file = "mypy-1.10.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2189ff1e39db399f08205e22a797383613ce1cb0cb3b13d8bcf0170e45b96cc3"}, - {file = "mypy-1.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:97a131ee36ac37ce9581f4220311247ab6cba896b4395b9c87af0675a13a755f"}, - {file = "mypy-1.10.1-py3-none-any.whl", hash = "sha256:71d8ac0b906354ebda8ef1673e5fde785936ac1f29ff6987c7483cfbd5a4235a"}, - {file = "mypy-1.10.1.tar.gz", hash = "sha256:1f8f492d7db9e3593ef42d4f115f04e556130f2819ad33ab84551403e97dd4c0"}, + {file = "mypy-1.19.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:5f05aa3d375b385734388e844bc01733bd33c644ab48e9684faa54e5389775ec"}, + {file = "mypy-1.19.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:022ea7279374af1a5d78dfcab853fe6a536eebfda4b59deab53cd21f6cd9f00b"}, + {file = "mypy-1.19.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee4c11e460685c3e0c64a4c5de82ae143622410950d6be863303a1c4ba0e36d6"}, + {file = "mypy-1.19.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de759aafbae8763283b2ee5869c7255391fbc4de3ff171f8f030b5ec48381b74"}, + {file = "mypy-1.19.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ab43590f9cd5108f41aacf9fca31841142c786827a74ab7cc8a2eacb634e09a1"}, + {file = "mypy-1.19.1-cp310-cp310-win_amd64.whl", hash = "sha256:2899753e2f61e571b3971747e302d5f420c3fd09650e1951e99f823bc3089dac"}, + {file = "mypy-1.19.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d8dfc6ab58ca7dda47d9237349157500468e404b17213d44fc1cb77bce532288"}, + {file = "mypy-1.19.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e3f276d8493c3c97930e354b2595a44a21348b320d859fb4a2b9f66da9ed27ab"}, + {file = "mypy-1.19.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2abb24cf3f17864770d18d673c85235ba52456b36a06b6afc1e07c1fdcd3d0e6"}, + {file = "mypy-1.19.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a009ffa5a621762d0c926a078c2d639104becab69e79538a494bcccb62cc0331"}, + {file = "mypy-1.19.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f7cee03c9a2e2ee26ec07479f38ea9c884e301d42c6d43a19d20fb014e3ba925"}, + {file = "mypy-1.19.1-cp311-cp311-win_amd64.whl", hash = "sha256:4b84a7a18f41e167f7995200a1d07a4a6810e89d29859df936f1c3923d263042"}, + {file = "mypy-1.19.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a8174a03289288c1f6c46d55cef02379b478bfbc8e358e02047487cad44c6ca1"}, + {file = 
"mypy-1.19.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ffcebe56eb09ff0c0885e750036a095e23793ba6c2e894e7e63f6d89ad51f22e"}, + {file = "mypy-1.19.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b64d987153888790bcdb03a6473d321820597ab8dd9243b27a92153c4fa50fd2"}, + {file = "mypy-1.19.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c35d298c2c4bba75feb2195655dfea8124d855dfd7343bf8b8c055421eaf0cf8"}, + {file = "mypy-1.19.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:34c81968774648ab5ac09c29a375fdede03ba253f8f8287847bd480782f73a6a"}, + {file = "mypy-1.19.1-cp312-cp312-win_amd64.whl", hash = "sha256:b10e7c2cd7870ba4ad9b2d8a6102eb5ffc1f16ca35e3de6bfa390c1113029d13"}, + {file = "mypy-1.19.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e3157c7594ff2ef1634ee058aafc56a82db665c9438fd41b390f3bde1ab12250"}, + {file = "mypy-1.19.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bdb12f69bcc02700c2b47e070238f42cb87f18c0bc1fc4cdb4fb2bc5fd7a3b8b"}, + {file = "mypy-1.19.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f859fb09d9583a985be9a493d5cfc5515b56b08f7447759a0c5deaf68d80506e"}, + {file = "mypy-1.19.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9a6538e0415310aad77cb94004ca6482330fece18036b5f360b62c45814c4ef"}, + {file = "mypy-1.19.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:da4869fc5e7f62a88f3fe0b5c919d1d9f7ea3cef92d3689de2823fd27e40aa75"}, + {file = "mypy-1.19.1-cp313-cp313-win_amd64.whl", hash = "sha256:016f2246209095e8eda7538944daa1d60e1e8134d98983b9fc1e92c1fc0cb8dd"}, + {file = "mypy-1.19.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:06e6170bd5836770e8104c8fdd58e5e725cfeb309f0a6c681a811f557e97eac1"}, + {file = "mypy-1.19.1-cp314-cp314-macosx_11_0_arm64.whl", hash = 
"sha256:804bd67b8054a85447c8954215a906d6eff9cabeabe493fb6334b24f4bfff718"}, + {file = "mypy-1.19.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:21761006a7f497cb0d4de3d8ef4ca70532256688b0523eee02baf9eec895e27b"}, + {file = "mypy-1.19.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:28902ee51f12e0f19e1e16fbe2f8f06b6637f482c459dd393efddd0ec7f82045"}, + {file = "mypy-1.19.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:481daf36a4c443332e2ae9c137dfee878fcea781a2e3f895d54bd3002a900957"}, + {file = "mypy-1.19.1-cp314-cp314-win_amd64.whl", hash = "sha256:8bb5c6f6d043655e055be9b542aa5f3bdd30e4f3589163e85f93f3640060509f"}, + {file = "mypy-1.19.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7bcfc336a03a1aaa26dfce9fff3e287a3ba99872a157561cbfcebe67c13308e3"}, + {file = "mypy-1.19.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b7951a701c07ea584c4fe327834b92a30825514c868b1f69c30445093fdd9d5a"}, + {file = "mypy-1.19.1-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b13cfdd6c87fc3efb69ea4ec18ef79c74c3f98b4e5498ca9b85ab3b2c2329a67"}, + {file = "mypy-1.19.1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4f28f99c824ecebcdaa2e55d82953e38ff60ee5ec938476796636b86afa3956e"}, + {file = "mypy-1.19.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c608937067d2fc5a4dd1a5ce92fd9e1398691b8c5d012d66e1ddd430e9244376"}, + {file = "mypy-1.19.1-cp39-cp39-win_amd64.whl", hash = "sha256:409088884802d511ee52ca067707b90c883426bd95514e8cfda8281dc2effe24"}, + {file = "mypy-1.19.1-py3-none-any.whl", hash = "sha256:f1235f5ea01b7db5468d53ece6aaddf1ad0b88d9e7462b86ef96fe04995d7247"}, + {file = "mypy-1.19.1.tar.gz", hash = "sha256:19d88bb05303fe63f71dd2c6270daca27cb9401c4ca8255fe50d1d920e0eb9ba"}, ] [package.dependencies] -mypy-extensions = ">=1.0.0" -typing-extensions = ">=4.1.0" 
+librt = {version = ">=0.6.2", markers = "platform_python_implementation != \"PyPy\""} +mypy_extensions = ">=1.0.0" +pathspec = ">=0.9.0" +typing_extensions = ">=4.6.0" [package.extras] dmypy = ["psutil (>=4.0)"] +faster-cache = ["orjson"] install-types = ["pip"] mypyc = ["setuptools (>=50)"] reports = ["lxml"] @@ -1144,18 +1333,19 @@ files = [ [[package]] name = "mypy-zope" -version = "1.0.5" +version = "1.0.14" description = "Plugin for mypy to support zope interfaces" optional = false python-versions = "*" groups = ["dev"] markers = "implementation_name == \"cpython\"" files = [ - {file = "mypy_zope-1.0.5.tar.gz", hash = "sha256:2440406d49c0e1199c1cd819c92a2c4957de65579c6abc8a081c927f4bdc8d49"}, + {file = "mypy_zope-1.0.14-py3-none-any.whl", hash = "sha256:8842ade93630421dbec0c9906d6515f6e65c6407ef8b9b2eb7f4f73ae1e8a42a"}, + {file = "mypy_zope-1.0.14.tar.gz", hash = "sha256:42555ad4703f2e50c912de3ebe0c7197619c3f71864817fabc5385ecea0f8449"}, ] [package.dependencies] -mypy = ">=1.0.0,<1.11.0" +mypy = ">=1.0.0,<1.20.0" "zope.interface" = "*" "zope.schema" = "*" @@ -1416,7 +1606,6 @@ description = "C parser in Python" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" groups = ["main"] -markers = "platform_python_implementation != \"PyPy\" or implementation_name == \"pypy\"" files = [ {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, @@ -1424,55 +1613,49 @@ files = [ [[package]] name = "pydantic" -version = "1.10.17" +version = "1.10.26" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" groups = ["main"] files = [ - {file = "pydantic-1.10.17-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0fa51175313cc30097660b10eec8ca55ed08bfa07acbfe02f7a42f6c242e9a4b"}, - {file = 
"pydantic-1.10.17-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7e8988bb16988890c985bd2093df9dd731bfb9d5e0860db054c23034fab8f7a"}, - {file = "pydantic-1.10.17-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:371dcf1831f87c9e217e2b6a0c66842879a14873114ebb9d0861ab22e3b5bb1e"}, - {file = "pydantic-1.10.17-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4866a1579c0c3ca2c40575398a24d805d4db6cb353ee74df75ddeee3c657f9a7"}, - {file = "pydantic-1.10.17-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:543da3c6914795b37785703ffc74ba4d660418620cc273490d42c53949eeeca6"}, - {file = "pydantic-1.10.17-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7623b59876f49e61c2e283551cc3647616d2fbdc0b4d36d3d638aae8547ea681"}, - {file = "pydantic-1.10.17-cp310-cp310-win_amd64.whl", hash = "sha256:409b2b36d7d7d19cd8310b97a4ce6b1755ef8bd45b9a2ec5ec2b124db0a0d8f3"}, - {file = "pydantic-1.10.17-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fa43f362b46741df8f201bf3e7dff3569fa92069bcc7b4a740dea3602e27ab7a"}, - {file = "pydantic-1.10.17-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2a72d2a5ff86a3075ed81ca031eac86923d44bc5d42e719d585a8eb547bf0c9b"}, - {file = "pydantic-1.10.17-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4ad32aed3bf5eea5ca5decc3d1bbc3d0ec5d4fbcd72a03cdad849458decbc63"}, - {file = "pydantic-1.10.17-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeb4e741782e236ee7dc1fb11ad94dc56aabaf02d21df0e79e0c21fe07c95741"}, - {file = "pydantic-1.10.17-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d2f89a719411cb234105735a520b7c077158a81e0fe1cb05a79c01fc5eb59d3c"}, - {file = "pydantic-1.10.17-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db3b48d9283d80a314f7a682f7acae8422386de659fffaba454b77a083c3937d"}, - {file = "pydantic-1.10.17-cp311-cp311-win_amd64.whl", hash = 
"sha256:9c803a5113cfab7bbb912f75faa4fc1e4acff43e452c82560349fff64f852e1b"}, - {file = "pydantic-1.10.17-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:820ae12a390c9cbb26bb44913c87fa2ff431a029a785642c1ff11fed0a095fcb"}, - {file = "pydantic-1.10.17-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c1e51d1af306641b7d1574d6d3307eaa10a4991542ca324f0feb134fee259815"}, - {file = "pydantic-1.10.17-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e53fb834aae96e7b0dadd6e92c66e7dd9cdf08965340ed04c16813102a47fab"}, - {file = "pydantic-1.10.17-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e2495309b1266e81d259a570dd199916ff34f7f51f1b549a0d37a6d9b17b4dc"}, - {file = "pydantic-1.10.17-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:098ad8de840c92ea586bf8efd9e2e90c6339d33ab5c1cfbb85be66e4ecf8213f"}, - {file = "pydantic-1.10.17-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:525bbef620dac93c430d5d6bdbc91bdb5521698d434adf4434a7ef6ffd5c4b7f"}, - {file = "pydantic-1.10.17-cp312-cp312-win_amd64.whl", hash = "sha256:6654028d1144df451e1da69a670083c27117d493f16cf83da81e1e50edce72ad"}, - {file = "pydantic-1.10.17-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c87cedb4680d1614f1d59d13fea353faf3afd41ba5c906a266f3f2e8c245d655"}, - {file = "pydantic-1.10.17-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11289fa895bcbc8f18704efa1d8020bb9a86314da435348f59745473eb042e6b"}, - {file = "pydantic-1.10.17-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94833612d6fd18b57c359a127cbfd932d9150c1b72fea7c86ab58c2a77edd7c7"}, - {file = "pydantic-1.10.17-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d4ecb515fa7cb0e46e163ecd9d52f9147ba57bc3633dca0e586cdb7a232db9e3"}, - {file = "pydantic-1.10.17-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7017971ffa7fd7808146880aa41b266e06c1e6e12261768a28b8b41ba55c8076"}, 
- {file = "pydantic-1.10.17-cp37-cp37m-win_amd64.whl", hash = "sha256:e840e6b2026920fc3f250ea8ebfdedf6ea7a25b77bf04c6576178e681942ae0f"}, - {file = "pydantic-1.10.17-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bfbb18b616abc4df70591b8c1ff1b3eabd234ddcddb86b7cac82657ab9017e33"}, - {file = "pydantic-1.10.17-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ebb249096d873593e014535ab07145498957091aa6ae92759a32d40cb9998e2e"}, - {file = "pydantic-1.10.17-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8c209af63ccd7b22fba94b9024e8b7fd07feffee0001efae50dd99316b27768"}, - {file = "pydantic-1.10.17-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4b40c9e13a0b61583e5599e7950490c700297b4a375b55b2b592774332798b7"}, - {file = "pydantic-1.10.17-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c31d281c7485223caf6474fc2b7cf21456289dbaa31401844069b77160cab9c7"}, - {file = "pydantic-1.10.17-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ae5184e99a060a5c80010a2d53c99aee76a3b0ad683d493e5f0620b5d86eeb75"}, - {file = "pydantic-1.10.17-cp38-cp38-win_amd64.whl", hash = "sha256:ad1e33dc6b9787a6f0f3fd132859aa75626528b49cc1f9e429cdacb2608ad5f0"}, - {file = "pydantic-1.10.17-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7e17c0ee7192e54a10943f245dc79e36d9fe282418ea05b886e1c666063a7b54"}, - {file = "pydantic-1.10.17-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cafb9c938f61d1b182dfc7d44a7021326547b7b9cf695db5b68ec7b590214773"}, - {file = "pydantic-1.10.17-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95ef534e3c22e5abbdbdd6f66b6ea9dac3ca3e34c5c632894f8625d13d084cbe"}, - {file = "pydantic-1.10.17-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62d96b8799ae3d782df7ec9615cb59fc32c32e1ed6afa1b231b0595f6516e8ab"}, - {file = "pydantic-1.10.17-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:ab2f976336808fd5d539fdc26eb51f9aafc1f4b638e212ef6b6f05e753c8011d"}, - {file = "pydantic-1.10.17-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8ad363330557beac73159acfbeed220d5f1bfcd6b930302a987a375e02f74fd"}, - {file = "pydantic-1.10.17-cp39-cp39-win_amd64.whl", hash = "sha256:48db882e48575ce4b39659558b2f9f37c25b8d348e37a2b4e32971dd5a7d6227"}, - {file = "pydantic-1.10.17-py3-none-any.whl", hash = "sha256:e41b5b973e5c64f674b3b4720286ded184dcc26a691dd55f34391c62c6934688"}, - {file = "pydantic-1.10.17.tar.gz", hash = "sha256:f434160fb14b353caf634149baaf847206406471ba70e64657c1e8330277a991"}, + {file = "pydantic-1.10.26-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f7ae36fa0ecef8d39884120f212e16c06bb096a38f523421278e2f39c1784546"}, + {file = "pydantic-1.10.26-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d95a76cf503f0f72ed7812a91de948440b2bf564269975738a4751e4fadeb572"}, + {file = "pydantic-1.10.26-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:a943ce8e00ad708ed06a1d9df5b4fd28f5635a003b82a4908ece6f24c0b18464"}, + {file = "pydantic-1.10.26-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:465ad8edb29b15c10b779b16431fe8e77c380098badf6db367b7a1d3e572cf53"}, + {file = "pydantic-1.10.26-cp310-cp310-win_amd64.whl", hash = "sha256:80e6be6272839c8a7641d26ad569ab77772809dd78f91d0068dc0fc97f071945"}, + {file = "pydantic-1.10.26-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:116233e53889bcc536f617e38c1b8337d7fa9c280f0fd7a4045947515a785637"}, + {file = "pydantic-1.10.26-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c3cfdd361addb6eb64ccd26ac356ad6514cee06a61ab26b27e16b5ed53108f77"}, + {file = "pydantic-1.10.26-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0e4451951a9a93bf9a90576f3e25240b47ee49ab5236adccb8eff6ac943adf0f"}, + {file = "pydantic-1.10.26-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9858ed44c6bea5f29ffe95308db9e62060791c877766c67dd5f55d072c8612b5"}, + {file = 
"pydantic-1.10.26-cp311-cp311-win_amd64.whl", hash = "sha256:ac1089f723e2106ebde434377d31239e00870a7563245072968e5af5cc4d33df"}, + {file = "pydantic-1.10.26-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:468d5b9cacfcaadc76ed0a4645354ab6f263ec01a63fb6d05630ea1df6ae453f"}, + {file = "pydantic-1.10.26-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2c1b0b914be31671000ca25cf7ea17fcaaa68cfeadf6924529c5c5aa24b7ab1f"}, + {file = "pydantic-1.10.26-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15b13b9f8ba8867095769e1156e0d7fbafa1f65b898dd40fd1c02e34430973cb"}, + {file = "pydantic-1.10.26-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad7025ca324ae263d4313998e25078dcaec5f9ed0392c06dedb57e053cc8086b"}, + {file = "pydantic-1.10.26-cp312-cp312-win_amd64.whl", hash = "sha256:4482b299874dabb88a6c3759e3d85c6557c407c3b586891f7d808d8a38b66b9c"}, + {file = "pydantic-1.10.26-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1ae7913bb40a96c87e3d3f6fe4e918ef53bf181583de4e71824360a9b11aef1c"}, + {file = "pydantic-1.10.26-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8154c13f58d4de5d3a856bb6c909c7370f41fb876a5952a503af6b975265f4ba"}, + {file = "pydantic-1.10.26-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f8af0507bf6118b054a9765fb2e402f18a8b70c964f420d95b525eb711122d62"}, + {file = "pydantic-1.10.26-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:dcb5a7318fb43189fde6af6f21ac7149c4bcbcfffc54bc87b5becddc46084847"}, + {file = "pydantic-1.10.26-cp313-cp313-win_amd64.whl", hash = "sha256:71cde228bc0600cf8619f0ee62db050d1880dcc477eba0e90b23011b4ee0f314"}, + {file = "pydantic-1.10.26-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:6b40730cc81d53d515dc0b8bb5c9b43fadb9bed46de4a3c03bd95e8571616dba"}, + {file = "pydantic-1.10.26-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c3bbb9c0eecdf599e4db9b372fa9cc55be12e80a0d9c6d307950a39050cb0e37"}, + {file = 
"pydantic-1.10.26-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:cc2e3fe7bc4993626ef6b6fa855defafa1d6f8996aa1caef2deb83c5ac4d043a"}, + {file = "pydantic-1.10.26-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:36d9e46b588aaeb1dcd2409fa4c467fe0b331f3cc9f227b03a7a00643704e962"}, + {file = "pydantic-1.10.26-cp314-cp314-win_amd64.whl", hash = "sha256:81ce3c8616d12a7be31b4aadfd3434f78f6b44b75adbfaec2fe1ad4f7f999b8c"}, + {file = "pydantic-1.10.26-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc5c91a3b3106caf07ac6735ec6efad8ba37b860b9eb569923386debe65039ad"}, + {file = "pydantic-1.10.26-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:dde599e0388e04778480d57f49355c9cc7916de818bf674de5d5429f2feebfb6"}, + {file = "pydantic-1.10.26-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8be08b5cfe88e58198722861c7aab737c978423c3a27300911767931e5311d0d"}, + {file = "pydantic-1.10.26-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0141f4bafe5eda539d98c9755128a9ea933654c6ca4306b5059fc87a01a38573"}, + {file = "pydantic-1.10.26-cp38-cp38-win_amd64.whl", hash = "sha256:eb664305ffca8a9766a8629303bb596607d77eae35bb5f32ff9245984881b638"}, + {file = "pydantic-1.10.26-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:502b9d30d18a2dfaf81b7302f6ba0e5853474b1c96212449eb4db912cb604b7d"}, + {file = "pydantic-1.10.26-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0d8f6087bf697dec3bf7ffcd7fe8362674f16519f3151789f33cbe8f1d19fc15"}, + {file = "pydantic-1.10.26-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:dd40a99c358419910c85e6f5d22f9c56684c25b5e7abc40879b3b4a52f34ae90"}, + {file = "pydantic-1.10.26-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ce3293b86ca9f4125df02ff0a70be91bc7946522467cbd98e7f1493f340616ba"}, + {file = "pydantic-1.10.26-cp39-cp39-win_amd64.whl", hash = "sha256:1a4e3062b71ab1d5df339ba12c48f9ed5817c5de6cb92a961dd5c64bb32e7b96"}, + {file = "pydantic-1.10.26-py3-none-any.whl", hash = 
"sha256:c43ad70dc3ce7787543d563792426a16fd7895e14be4b194b5665e36459dd917"}, + {file = "pydantic-1.10.26.tar.gz", hash = "sha256:8c6aa39b494c5af092e690127c283d84f363ac36017106a9e66cb33a22ac412e"}, ] [package.dependencies] @@ -1668,6 +1851,7 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -2286,6 +2470,7 @@ files = [ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] +markers = {dev = "implementation_name == \"cpython\""} [[package]] name = "urllib3" @@ -2461,58 +2646,50 @@ test = ["zope.testrunner"] [[package]] name = "zope-interface" -version = "6.1" +version = "8.2" description = "Interfaces for Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.10" groups = ["main", "dev"] files = [ - {file = "zope.interface-6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:43b576c34ef0c1f5a4981163b551a8781896f2a37f71b8655fd20b5af0386abb"}, - {file = "zope.interface-6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:67be3ca75012c6e9b109860820a8b6c9a84bfb036fbd1076246b98e56951ca92"}, - {file = "zope.interface-6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b9bc671626281f6045ad61d93a60f52fd5e8209b1610972cf0ef1bbe6d808e3"}, - {file = "zope.interface-6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbe81def9cf3e46f16ce01d9bfd8bea595e06505e51b7baf45115c77352675fd"}, - {file = "zope.interface-6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6dc998f6de015723196a904045e5a2217f3590b62ea31990672e31fbc5370b41"}, - {file = "zope.interface-6.1-cp310-cp310-win_amd64.whl", hash = "sha256:239a4a08525c080ff833560171d23b249f7f4d17fcbf9316ef4159f44997616f"}, - {file = "zope.interface-6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9ffdaa5290422ac0f1688cb8adb1b94ca56cee3ad11f29f2ae301df8aecba7d1"}, - {file = "zope.interface-6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:34c15ca9248f2e095ef2e93af2d633358c5f048c49fbfddf5fdfc47d5e263736"}, - {file = "zope.interface-6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b012d023b4fb59183909b45d7f97fb493ef7a46d2838a5e716e3155081894605"}, - {file = "zope.interface-6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:97806e9ca3651588c1baaebb8d0c5ee3db95430b612db354c199b57378312ee8"}, - {file = "zope.interface-6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fddbab55a2473f1d3b8833ec6b7ac31e8211b0aa608df5ab09ce07f3727326de"}, - {file = "zope.interface-6.1-cp311-cp311-win_amd64.whl", hash = "sha256:a0da79117952a9a41253696ed3e8b560a425197d4e41634a23b1507efe3273f1"}, - 
{file = "zope.interface-6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e8bb9c990ca9027b4214fa543fd4025818dc95f8b7abce79d61dc8a2112b561a"}, - {file = "zope.interface-6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b51b64432eed4c0744241e9ce5c70dcfecac866dff720e746d0a9c82f371dfa7"}, - {file = "zope.interface-6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa6fd016e9644406d0a61313e50348c706e911dca29736a3266fc9e28ec4ca6d"}, - {file = "zope.interface-6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c8cf55261e15590065039696607f6c9c1aeda700ceee40c70478552d323b3ff"}, - {file = "zope.interface-6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e30506bcb03de8983f78884807e4fd95d8db6e65b69257eea05d13d519b83ac0"}, - {file = "zope.interface-6.1-cp312-cp312-win_amd64.whl", hash = "sha256:e33e86fd65f369f10608b08729c8f1c92ec7e0e485964670b4d2633a4812d36b"}, - {file = "zope.interface-6.1-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:2f8d89721834524a813f37fa174bac074ec3d179858e4ad1b7efd4401f8ac45d"}, - {file = "zope.interface-6.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13b7d0f2a67eb83c385880489dbb80145e9d344427b4262c49fbf2581677c11c"}, - {file = "zope.interface-6.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef43ee91c193f827e49599e824385ec7c7f3cd152d74cb1dfe02cb135f264d83"}, - {file = "zope.interface-6.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e441e8b7d587af0414d25e8d05e27040d78581388eed4c54c30c0c91aad3a379"}, - {file = "zope.interface-6.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f89b28772fc2562ed9ad871c865f5320ef761a7fcc188a935e21fe8b31a38ca9"}, - {file = "zope.interface-6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:70d2cef1bf529bff41559be2de9d44d47b002f65e17f43c73ddefc92f32bf00f"}, - {file = "zope.interface-6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ad54ed57bdfa3254d23ae04a4b1ce405954969c1b0550cc2d1d2990e8b439de1"}, - {file = "zope.interface-6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef467d86d3cfde8b39ea1b35090208b0447caaabd38405420830f7fd85fbdd56"}, - {file = "zope.interface-6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6af47f10cfc54c2ba2d825220f180cc1e2d4914d783d6fc0cd93d43d7bc1c78b"}, - {file = "zope.interface-6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9559138690e1bd4ea6cd0954d22d1e9251e8025ce9ede5d0af0ceae4a401e43"}, - {file = "zope.interface-6.1-cp38-cp38-win_amd64.whl", hash = "sha256:964a7af27379ff4357dad1256d9f215047e70e93009e532d36dcb8909036033d"}, - {file = "zope.interface-6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:387545206c56b0315fbadb0431d5129c797f92dc59e276b3ce82db07ac1c6179"}, - {file = "zope.interface-6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:57d0a8ce40ce440f96a2c77824ee94bf0d0925e6089df7366c2272ccefcb7941"}, - {file = "zope.interface-6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ebc4d34e7620c4f0da7bf162c81978fce0ea820e4fa1e8fc40ee763839805f3"}, - {file = "zope.interface-6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a804abc126b33824a44a7aa94f06cd211a18bbf31898ba04bd0924fbe9d282d"}, - {file = "zope.interface-6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f294a15f7723fc0d3b40701ca9b446133ec713eafc1cc6afa7b3d98666ee1ac"}, - {file = "zope.interface-6.1-cp39-cp39-win_amd64.whl", hash = "sha256:a41f87bb93b8048fe866fa9e3d0c51e27fe55149035dcf5f43da4b56732c0a40"}, - {file = 
"zope.interface-6.1.tar.gz", hash = "sha256:2fdc7ccbd6eb6b7df5353012fbed6c3c5d04ceaca0038f75e601060e95345309"}, + {file = "zope_interface-8.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:788c293f3165964ec6527b2d861072c68eef53425213f36d3893ebee89a89623"}, + {file = "zope_interface-8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9a4e785097e741a1c953b3970ce28f2823bd63c00adc5d276f2981dd66c96c15"}, + {file = "zope_interface-8.2-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:16c69da19a06566664ddd4785f37cad5693a51d48df1515d264c20d005d322e2"}, + {file = "zope_interface-8.2-cp310-cp310-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c31acfa3d7cde48bec45701b0e1f4698daffc378f559bfb296837d8c834732f6"}, + {file = "zope_interface-8.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0723507127f8269b8f3f22663168f717e9c9742107d1b6c9f419df561b71aa6d"}, + {file = "zope_interface-8.2-cp310-cp310-win_amd64.whl", hash = "sha256:3bf73a910bb27344def2d301a03329c559a79b308e1e584686b74171d736be4e"}, + {file = "zope_interface-8.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c65ade7ea85516e428651048489f5e689e695c79188761de8c622594d1e13322"}, + {file = "zope_interface-8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a1ef4b43659e1348f35f38e7d1a6bbc1682efde239761f335ffc7e31e798b65b"}, + {file = "zope_interface-8.2-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:dfc4f44e8de2ff4eba20af4f0a3ca42d3c43ab24a08e49ccd8558b7a4185b466"}, + {file = "zope_interface-8.2-cp311-cp311-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8f094bfb49179ec5dc9981cb769af1275702bd64720ef94874d9e34da1390d4c"}, + {file = "zope_interface-8.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:d2bb8e7364e18f083bf6744ccf30433b2a5f236c39c95df8514e3c13007098ce"}, + {file = "zope_interface-8.2-cp311-cp311-win_amd64.whl", hash = "sha256:6f4b4dfcfdfaa9177a600bb31cebf711fdb8c8e9ed84f14c61c420c6aa398489"}, + {file = "zope_interface-8.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:624b6787fc7c3e45fa401984f6add2c736b70a7506518c3b537ffaacc4b29d4c"}, + {file = "zope_interface-8.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bc9ded9e97a0ed17731d479596ed1071e53b18e6fdb2fc33af1e43f5fd2d3aaa"}, + {file = "zope_interface-8.2-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:532367553e4420c80c0fc0cabcc2c74080d495573706f66723edee6eae53361d"}, + {file = "zope_interface-8.2-cp312-cp312-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2bf9cf275468bafa3c72688aad8cfcbe3d28ee792baf0b228a1b2d93bd1d541a"}, + {file = "zope_interface-8.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0009d2d3c02ea783045d7804da4fd016245e5c5de31a86cebba66dd6914d59a2"}, + {file = "zope_interface-8.2-cp312-cp312-win_amd64.whl", hash = "sha256:845d14e580220ae4544bd4d7eb800f0b6034fe5585fc2536806e0a26c2ee6640"}, + {file = "zope_interface-8.2-cp313-cp313-macosx_10_9_x86_64.whl", hash = "sha256:6068322004a0158c80dfd4708dfb103a899635408c67c3b10e9acec4dbacefec"}, + {file = "zope_interface-8.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2499de92e8275d0dd68f84425b3e19e9268cd1fa8507997900fa4175f157733c"}, + {file = "zope_interface-8.2-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:f777e68c76208503609c83ca021a6864902b646530a1a39abb9ed310d1100664"}, + {file = "zope_interface-8.2-cp313-cp313-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9b05a919fdb0ed6ea942e5a7800e09a8b6cdae6f98fee1bef1c9d1a3fc43aaa0"}, + {file = 
"zope_interface-8.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ccc62b5712dd7bd64cfba3ee63089fb11e840f5914b990033beeae3b2180b6cb"}, + {file = "zope_interface-8.2-cp313-cp313-win_amd64.whl", hash = "sha256:34f877d1d3bb7565c494ed93828fa6417641ca26faf6e8f044e0d0d500807028"}, + {file = "zope_interface-8.2-cp314-cp314-macosx_10_9_x86_64.whl", hash = "sha256:46c7e4e8cbc698398a67e56ca985d19cb92365b4aafbeb6a712e8c101090f4cb"}, + {file = "zope_interface-8.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a87fc7517f825a97ff4a4ca4c8a950593c59e0f8e7bfe1b6f898a38d5ba9f9cf"}, + {file = "zope_interface-8.2-cp314-cp314-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:ccf52f7d44d669203c2096c1a0c2c15d52e36b2e7a9413df50f48392c7d4d080"}, + {file = "zope_interface-8.2-cp314-cp314-manylinux1_x86_64.manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:aae807efc7bd26302eb2fea05cd6de7d59269ed6ae23a6de1ee47add6de99b8c"}, + {file = "zope_interface-8.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:05a0e42d6d830f547e114de2e7cd15750dc6c0c78f8138e6c5035e51ddfff37c"}, + {file = "zope_interface-8.2-cp314-cp314-win_amd64.whl", hash = "sha256:561ce42390bee90bae51cf1c012902a8033b2aaefbd0deed81e877562a116d48"}, + {file = "zope_interface-8.2.tar.gz", hash = "sha256:afb20c371a601d261b4f6edb53c3c418c249db1a9717b0baafc9a9bb39ba1224"}, ] markers = {dev = "implementation_name == \"cpython\""} -[package.dependencies] -setuptools = "*" - [package.extras] -docs = ["Sphinx", "repoze.sphinx.autointerface", "sphinx-rtd-theme"] -test = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] -testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] +docs = ["Sphinx", "furo", "repoze.sphinx.autointerface"] +test = ["coverage[toml]", "zope.event", "zope.testing"] +testing = ["coverage[toml]", "zope.event", "zope.testing"] [[package]] name = "zope-schema" @@ -2542,4 
+2719,4 @@ sentry = ["sentry-sdk", "structlog-sentry"] [metadata] lock-version = "2.1" python-versions = ">=3.11,<4" -content-hash = "94cb3f852de11baa61d5004dd424135b1f85d70410ac170ef5362085c2b6b983" +content-hash = "00c97ccd8d0b45e844ae915c1fa9aab9d5616ad0be8562956179a665906ee277" diff --git a/pyproject.toml b/pyproject.toml index d7655abdc..b4a64f209 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,7 +14,7 @@ [tool.poetry] name = "hathor" -version = "0.68.4" +version = "0.69.0" description = "Hathor Network full-node" authors = ["Hathor Team "] license = "Apache-2.0" @@ -26,7 +26,6 @@ classifiers = [ "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Operating System :: OS Independent", - "License :: OSI Approved :: Apache Software License", "Private :: Do Not Upload", ] exclude = ["hathor_tests", "hathor_tests.*"] @@ -41,8 +40,8 @@ hathor-cli = 'hathor_cli.main:main' [tool.poetry.group.dev.dependencies] flake8 = "~7.1.1" isort = {version = "~5.13.2", extras = ["colors"]} -mypy = {version = "^1.10.1", markers = "implementation_name == 'cpython'"} -mypy-zope = {version = "^1.0.5", markers = "implementation_name == 'cpython'"} +mypy = {version = "^1.19.1", markers = "implementation_name == 'cpython'"} +mypy-zope = {version = "^1.0.14", markers = "implementation_name == 'cpython'"} pytest = "~8.3.2" pytest-cov = "~5.0.0" flaky = "~3.8.1" @@ -59,7 +58,7 @@ twisted = "~24.7.0" autobahn = "~24.4.2" base58 = "~2.1.1" colorama = "~0.4.6" -configargparse = "~1.5.3" +configargparse = "~1.7.1" cryptography = "~42.0.5" graphviz = "~0.20.1" ipython = {version = "~8.7.0", extras = ["kernel"]} @@ -78,11 +77,14 @@ idna = "~3.4" setproctitle = "^1.3.3" sentry-sdk = {version = "^1.5.11", optional = true} structlog-sentry = {version = "^1.4.0", optional = true} -hathorlib = "^0.12.0" -pydantic = "~1.10.17" +pydantic = "~1.10.26" pyyaml = "^6.0.1" typing-extensions = "~4.12.2" python-healthchecklib = "^0.1.0" +multidict = "=6.7.0" +hathorlib 
= {path = "hathorlib"} +cffi = "=1.17.1" +zope-interface = "=8.2" [tool.poetry.extras] sentry = ["sentry-sdk", "structlog-sentry"]