diff --git a/hathor/api_util.py b/hathor/api_util.py index 160ff9d0d..b31e6acfa 100644 --- a/hathor/api_util.py +++ b/hathor/api_util.py @@ -13,7 +13,7 @@ # limitations under the License. import re -from typing import Any, Dict, List, Optional, Tuple, TypeVar, Union, cast +from typing import Any, Optional, TypeVar, Union, cast from twisted.web.http import Request from twisted.web.resource import Resource as TwistedResource @@ -57,7 +57,7 @@ def get_missing_params_msg(param_name): return json_dumpb({'success': False, 'message': f'Missing parameter: {param_name}'}) -def parse_args(args: Dict[bytes, List[bytes]], expected_args: List[str]) -> Dict[str, Any]: +def parse_args(args: dict[bytes, list[bytes]], expected_args: list[str]) -> dict[str, Any]: """Parse all expected arguments. If there are missing arguments, returns the missing arguments """ expected_set = set(expected_args) @@ -70,7 +70,7 @@ def parse_args(args: Dict[bytes, List[bytes]], expected_args: List[str]) -> Dict if diff: return {'success': False, 'missing': ', '.join(sorted(diff))} - ret: Dict[str, str] = dict() + ret: dict[str, str] = dict() for arg2 in expected_args: key_str = arg2.encode('utf-8') first_param = args[key_str][0] @@ -93,7 +93,7 @@ def parse_int(raw: Union[str, bytes], *, return value -def validate_tx_hash(hash_hex: str, tx_storage: TransactionStorage) -> Tuple[bool, str]: +def validate_tx_hash(hash_hex: str, tx_storage: TransactionStorage) -> tuple[bool, str]: """ Validate if the tx hash is valid and if it exists Return success and a message in case of failure """ @@ -118,18 +118,18 @@ def validate_tx_hash(hash_hex: str, tx_storage: TransactionStorage) -> Tuple[boo class Resource(TwistedResource): - openapi: Dict[str, Any] = {} + openapi: dict[str, Any] = {} -def get_args(request: Request) -> Dict[bytes, List[bytes]]: +def get_args(request: Request) -> dict[bytes, list[bytes]]: """Type-friendly way to access request.args, also always returns a dict instead of None.""" - args = 
cast(Optional[Dict[bytes, List[bytes]]], request.args) + args = cast(Optional[dict[bytes, list[bytes]]], request.args) if args is None: return {} return args -def get_arg_default(args: Dict[bytes, List[bytes]], key: str, default: T) -> T: +def get_arg_default(args: dict[bytes, list[bytes]], key: str, default: T) -> T: """Get a value with given key from an request.args formatted dict, return default if key was not found. Examples: diff --git a/hathor/builder/builder.py b/hathor/builder/builder.py index e298993cb..7e51fc1fd 100644 --- a/hathor/builder/builder.py +++ b/hathor/builder/builder.py @@ -13,7 +13,7 @@ # limitations under the License. from enum import Enum -from typing import Any, Dict, List, NamedTuple, Optional, Set +from typing import Any, NamedTuple, Optional from structlog import get_logger @@ -75,8 +75,8 @@ def __init__(self) -> None: self._settings: HathorSettingsType = HathorSettings() self._rng: Random = Random() - self._checkpoints: Optional[List[Checkpoint]] = None - self._capabilities: Optional[List[str]] = None + self._checkpoints: Optional[list[Checkpoint]] = None + self._capabilities: Optional[list[str]] = None self._peer_id: Optional[PeerId] = None self._network: Optional[str] = None @@ -116,7 +116,7 @@ def __init__(self) -> None: self._full_verification: Optional[bool] = None - self._soft_voided_tx_ids: Optional[Set[bytes]] = None + self._soft_voided_tx_ids: Optional[set[bytes]] = None def build(self) -> BuildArtifacts: if self.artifacts is not None: @@ -151,7 +151,7 @@ def build(self) -> BuildArtifacts: if self._enable_utxo_index: indexes.enable_utxo_index() - kwargs: Dict[str, Any] = {} + kwargs: dict[str, Any] = {} if self._full_verification is not None: kwargs['full_verification'] = self._full_verification @@ -214,12 +214,12 @@ def set_rng(self, rng: Random) -> 'Builder': self._rng = rng return self - def set_checkpoints(self, checkpoints: List[Checkpoint]) -> 'Builder': + def set_checkpoints(self, checkpoints: list[Checkpoint]) -> 
'Builder': self.check_if_can_modify() self._checkpoints = checkpoints return self - def set_capabilities(self, capabilities: List[str]) -> 'Builder': + def set_capabilities(self, capabilities: list[str]) -> 'Builder': self.check_if_can_modify() self._capabilities = capabilities return self @@ -237,7 +237,7 @@ def _get_reactor(self) -> Reactor: return self._reactor raise ValueError('reactor not set') - def _get_soft_voided_tx_ids(self) -> Set[bytes]: + def _get_soft_voided_tx_ids(self) -> set[bytes]: if self._soft_voided_tx_ids is not None: return self._soft_voided_tx_ids @@ -510,7 +510,7 @@ def disable_full_verification(self) -> 'Builder': self._full_verification = False return self - def set_soft_voided_tx_ids(self, soft_voided_tx_ids: Set[bytes]) -> 'Builder': + def set_soft_voided_tx_ids(self, soft_voided_tx_ids: set[bytes]) -> 'Builder': self.check_if_can_modify() self._soft_voided_tx_ids = soft_voided_tx_ids return self diff --git a/hathor/builder/resources_builder.py b/hathor/builder/resources_builder.py index 8925c0d5b..d5e04e382 100644 --- a/hathor/builder/resources_builder.py +++ b/hathor/builder/resources_builder.py @@ -14,7 +14,7 @@ import os from argparse import Namespace -from typing import TYPE_CHECKING, Any, Dict, Optional +from typing import TYPE_CHECKING, Any, Optional from autobahn.twisted.resource import WebSocketResource from structlog import get_logger @@ -50,7 +50,7 @@ def build(self, args: Namespace) -> Optional[server.Site]: return None def create_prometheus(self, args: Namespace) -> PrometheusMetricsExporter: - kwargs: Dict[str, Any] = { + kwargs: dict[str, Any] = { 'metrics': self.manager.metrics, 'metrics_prefix': args.prometheus_prefix } diff --git a/hathor/cli/main.py b/hathor/cli/main.py index 82403398f..a9c287cbf 100644 --- a/hathor/cli/main.py +++ b/hathor/cli/main.py @@ -16,7 +16,7 @@ import sys from collections import defaultdict from types import ModuleType -from typing import Dict, List, Optional +from typing import Optional from 
structlog import get_logger @@ -26,9 +26,9 @@ class CliManager: def __init__(self) -> None: self.basename: str = os.path.basename(sys.argv[0]) - self.command_list: Dict[str, ModuleType] = {} - self.cmd_description: Dict[str, str] = {} - self.groups: Dict[str, List[str]] = defaultdict(list) + self.command_list: dict[str, ModuleType] = {} + self.cmd_description: dict[str, str] = {} + self.groups: dict[str, list[str]] = defaultdict(list) self.longest_cmd: int = 0 from . import ( diff --git a/hathor/cli/mining.py b/hathor/cli/mining.py index 4be0ddec7..9a373be90 100644 --- a/hathor/cli/mining.py +++ b/hathor/cli/mining.py @@ -21,7 +21,6 @@ from argparse import ArgumentParser, Namespace from json.decoder import JSONDecodeError from multiprocessing import Process, Queue -from typing import Tuple import requests @@ -72,7 +71,7 @@ def execute(args: Namespace) -> None: total = 0 conn_retries = 0 - q_in: Queue[Tuple[Block, int, int, int]] + q_in: Queue[tuple[Block, int, int, int]] q_out: Queue[Block] q_in, q_out = Queue(), Queue() while True: diff --git a/hathor/cli/nginx_config.py b/hathor/cli/nginx_config.py index 3019fe5f9..f8d70fc39 100644 --- a/hathor/cli/nginx_config.py +++ b/hathor/cli/nginx_config.py @@ -15,14 +15,14 @@ import json import os from enum import Enum -from typing import Any, Dict, List, NamedTuple, Optional, TextIO, Tuple +from typing import Any, NamedTuple, Optional, TextIO from hathor.cli.openapi_json import get_openapi_dict BASE_PATH = os.path.join(os.path.dirname(__file__), 'nginx_files') -def get_openapi(src_file: Optional[TextIO] = None) -> Dict[str, Any]: +def get_openapi(src_file: Optional[TextIO] = None) -> dict[str, Any]: """ Open and parse the json file or generate OpenAPI dict on-the-fly """ if src_file is None: @@ -101,14 +101,14 @@ def _scale_rate_limit(raw_rate: str, rate_k: float) -> str: return f'{int(scaled_rate_amount)}{rate_units}' -def _get_visibility(source: Dict[str, Any], fallback: Visibility) -> Tuple[Visibility, bool]: +def 
_get_visibility(source: dict[str, Any], fallback: Visibility) -> tuple[Visibility, bool]: if 'x-visibility' in source: return Visibility(source['x-visibility']), False else: return fallback, True -def generate_nginx_config(openapi: Dict[str, Any], *, out_file: TextIO, rate_k: float = 1.0, +def generate_nginx_config(openapi: dict[str, Any], *, out_file: TextIO, rate_k: float = 1.0, fallback_visibility: Visibility = Visibility.PRIVATE, disable_rate_limits: bool = False) -> None: """ Entry point of the functionality provided by the cli @@ -120,8 +120,8 @@ def generate_nginx_config(openapi: Dict[str, Any], *, out_file: TextIO, rate_k: settings = HathorSettings() api_prefix = settings.API_VERSION_PREFIX - locations: Dict[str, Dict[str, Any]] = {} - limit_rate_zones: List[RateLimitZone] = [] + locations: dict[str, dict[str, Any]] = {} + limit_rate_zones: list[RateLimitZone] = [] for path, params in openapi['paths'].items(): visibility, did_fallback = _get_visibility(params, fallback_visibility) if did_fallback: @@ -129,7 +129,7 @@ def generate_nginx_config(openapi: Dict[str, Any], *, out_file: TextIO, rate_k: if visibility is Visibility.PRIVATE: continue - location_params: Dict[str, Any] = { + location_params: dict[str, Any] = { 'rate_limits': [], 'path_vars_re': params.get('x-path-params-regex', {}), } diff --git a/hathor/cli/openapi_files/register.py b/hathor/cli/openapi_files/register.py index 252a90238..3e531f97d 100644 --- a/hathor/cli/openapi_files/register.py +++ b/hathor/cli/openapi_files/register.py @@ -12,15 +12,15 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import List, Type, TypeVar +from typing import TypeVar from hathor.api_util import Resource -_registered_resources: List[Type[Resource]] = [] +_registered_resources: list[type[Resource]] = [] # XXX: this type var is used to indicate that the returned class is the same as the input class -ResourceClass = TypeVar('ResourceClass', bound=Type[Resource]) +ResourceClass = TypeVar('ResourceClass', bound=type[Resource]) def register_resource(resource_class: ResourceClass) -> ResourceClass: @@ -31,7 +31,7 @@ def register_resource(resource_class: ResourceClass) -> ResourceClass: return resource_class -def get_registered_resources() -> List[Type[Resource]]: +def get_registered_resources() -> list[type[Resource]]: """ Returns a list with all the resources registered for the docs """ import hathor.p2p.resources # noqa: 401 diff --git a/hathor/cli/openapi_json.py b/hathor/cli/openapi_json.py index 2d1539cb9..d5b0feec1 100644 --- a/hathor/cli/openapi_json.py +++ b/hathor/cli/openapi_json.py @@ -14,27 +14,27 @@ import json import os -from typing import Any, Dict +from typing import Any BASE_PATH = os.path.join(os.path.dirname(__file__), 'openapi_files') DEFAULT_OUTPUT_PATH = os.path.join(BASE_PATH, 'openapi.json') -def get_base() -> Dict[str, Any]: +def get_base() -> dict[str, Any]: """ Returns the base configuration from OpenAPI json """ with open(os.path.join(BASE_PATH, 'openapi_base.json'), 'r') as f: return json.load(f) -def get_components() -> Dict[str, Any]: +def get_components() -> dict[str, Any]: """ Returns the components from OpenAPI json """ with open(os.path.join(BASE_PATH, 'openapi_components.json'), 'r') as f: return json.load(f) -def get_openapi_dict() -> Dict[str, Any]: +def get_openapi_dict() -> dict[str, Any]: """ Returns the generated OpenAPI dict """ from hathor.cli.openapi_files.register import get_registered_resources diff --git a/hathor/cli/run_node.py b/hathor/cli/run_node.py index 4c4169201..51a2692da 100644 --- a/hathor/cli/run_node.py +++ 
b/hathor/cli/run_node.py @@ -15,7 +15,7 @@ import os import sys from argparse import SUPPRESS, ArgumentParser, Namespace -from typing import Any, Callable, List, Tuple +from typing import Any, Callable from pydantic import ValidationError from structlog import get_logger @@ -28,7 +28,7 @@ class RunNode: - UNSAFE_ARGUMENTS: List[Tuple[str, Callable[[Namespace], bool]]] = [ + UNSAFE_ARGUMENTS: list[tuple[str, Callable[[Namespace], bool]]] = [ ('--test-mode-tx-weight', lambda args: bool(args.test_mode_tx_weight)), ('--enable-crash-api', lambda args: bool(args.enable_crash_api)), ('--x-sync-bridge', lambda args: bool(args.x_sync_bridge)), @@ -361,7 +361,7 @@ def init_sysctl(self, description: str) -> None: endpoint = serverFromString(self.reactor, description) endpoint.listen(factory) - def parse_args(self, argv: List[str]) -> Namespace: + def parse_args(self, argv: list[str]) -> Namespace: return self.parser.parse_args(argv) def run(self) -> None: diff --git a/hathor/cli/shell.py b/hathor/cli/shell.py index e8acee385..7dc7a121d 100644 --- a/hathor/cli/shell.py +++ b/hathor/cli/shell.py @@ -13,12 +13,12 @@ # limitations under the License. 
from argparse import Namespace -from typing import Any, Callable, Dict, List +from typing import Any, Callable from hathor.cli.run_node import RunNode -def get_ipython(extra_args: List[Any], imported_objects: Dict[str, Any]) -> Callable[[], None]: +def get_ipython(extra_args: list[Any], imported_objects: dict[str, Any]) -> Callable[[], None]: from IPython import start_ipython def run_ipython(): @@ -37,7 +37,7 @@ def register_signal_handlers(self, args: Namespace) -> None: def prepare(self, args: Namespace, *, register_resources: bool = True) -> None: super().prepare(args, register_resources=False) - imported_objects: Dict[str, Any] = {} + imported_objects: dict[str, Any] = {} imported_objects['tx_storage'] = self.tx_storage if args.wallet: imported_objects['wallet'] = self.wallet @@ -51,9 +51,9 @@ def prepare(self, args: Namespace, *, register_resources: bool = True) -> None: print('------------------------') print() - def parse_args(self, argv: List[str]) -> Namespace: + def parse_args(self, argv: list[str]) -> Namespace: # TODO: add help for the `--` extra argument separator - extra_args: List[str] = [] + extra_args: list[str] = [] if '--' in argv: idx = argv.index('--') extra_args = argv[idx + 1:] diff --git a/hathor/cli/top.py b/hathor/cli/top.py index 0d75f09bd..4adfac8f3 100644 --- a/hathor/cli/top.py +++ b/hathor/cli/top.py @@ -22,7 +22,7 @@ from asyncio import AbstractEventLoop from collections import defaultdict from math import floor -from typing import Any, Callable, DefaultDict, Dict, List, Optional, Tuple +from typing import Any, Callable, Optional # XXX: as annoying as it is, a simple `if: raise` is not enough, but putting the whole module inside works if sys.platform != 'win32': @@ -31,8 +31,8 @@ from aiohttp import ClientSession - Key = Tuple[str, ...] - ProcGroup = DefaultDict[Key, List['ProcItem']] + Key = tuple[str, ...] + ProcGroup = defaultdict[Key, list['ProcItem']] # Global color variable. 
Color: Optional['DefaultColor'] = None @@ -45,10 +45,10 @@ class ProfileData: enabled: bool last_update: float error: str - proc_list: List['ProcItem'] + proc_list: list['ProcItem'] @classmethod - def create_from_api(cls, data: Dict[str, Any]) -> 'ProfileData': + def create_from_api(cls, data: dict[str, Any]) -> 'ProfileData': self = cls() self.hostname = data['hostname'] self.version = data['version'] @@ -69,7 +69,7 @@ class ProcItem: total_time: float @classmethod - def create_from_api(cls, data: Tuple[Key, Dict[str, Any]]) -> 'ProcItem': + def create_from_api(cls, data: tuple[Key, dict[str, Any]]) -> 'ProcItem': self = cls() self.key = tuple(data[0]) stats = data[1] @@ -87,7 +87,7 @@ def __init__(self, win: Any, groups: ProcGroup, max_depth: int, max_rows: int, s self.max_rows = max_rows self.source_width = source_width - self.last_child_stack: List[bool] = [] + self.last_child_stack: list[bool] = [] self.rows: int = 0 def print_tree(self) -> None: @@ -176,7 +176,7 @@ def format_total_time(self, total_time: float) -> str: else: return '{:02d}:{:02d}.{:02d}'.format(minutes, seconds, floor(100 * frac)) - def group_by_parent(proc_list: List[ProcItem]) -> ProcGroup: + def group_by_parent(proc_list: list[ProcItem]) -> ProcGroup: """Group the processes by their parents. It converts from the format received by the API and the format used by the printer. 
@@ -192,8 +192,8 @@ def group_cpu_percent(groups: ProcGroup, *, threshold: float = 0.5, separator: s """Group processes that consumes less than `threadhold` of CPU.""" new_groups: ProcGroup = defaultdict(list) for key, children in groups.items(): - hidden_procs: DefaultDict[str, List[ProcItem]] = defaultdict(list) - new_children: List[ProcItem] = [] + hidden_procs: defaultdict[str, list[ProcItem]] = defaultdict(list) + new_children: list[ProcItem] = [] for proc in children: if proc.percent_cpu < threshold: local_key = proc.key[-1] @@ -424,7 +424,7 @@ def render(self) -> None: height, width = self.win.getmaxyx() - proc_list: List[ProcItem] = data.proc_list + proc_list: list[ProcItem] = data.proc_list groups: ProcGroup = group_by_parent(proc_list) if self.group_cpu_percent: groups = group_cpu_percent(groups) @@ -451,10 +451,10 @@ class ControlWindow(Window): def __init__(self, manager: 'ScreenManager', win: Any) -> None: super().__init__(manager, win) - self._logs: List[Tuple[str, int]] = [] + self._logs: list[tuple[str, int]] = [] fetcher: 'ProfileAPIClient' = self.manager.fetcher - self.cmd_map: Dict[str, Any] = { + self.cmd_map: dict[str, Any] = { 'start': fetcher.send_start_cmd, 'stop': fetcher.send_stop_cmd, 'reset': fetcher.send_reset_cmd, @@ -564,7 +564,7 @@ def __init__(self, loop: AbstractEventLoop, fetcher: 'ProfileAPIClient', *, self.fetcher: 'ProfileAPIClient' = fetcher - self.screen_list: Dict[str, Window] = { + self.screen_list: dict[str, Window] = { 'help': HelpWindow(self, self.stdscr), 'main': MainWindow(self, self.stdscr), 'control': ControlWindow(self, self.stdscr), @@ -695,7 +695,7 @@ async def send_reset_cmd(self): async def run(self) -> Any: while True: try: - data_dict: Dict[str, Any] = await self.fetch() + data_dict: dict[str, Any] = await self.fetch() data = ProfileData.create_from_api(data_dict) self.last_update = time.time() self.error = '' @@ -717,7 +717,7 @@ async def fetch(self): class DefaultColor: def __init__(self) -> None: - 
self._color_map: Dict[Tuple[int, int], int] = {} + self._color_map: dict[tuple[int, int], int] = {} A_NONE = 0 A_BOLD = curses.A_BOLD diff --git a/hathor/cli/tx_generator.py b/hathor/cli/tx_generator.py index f96ed2272..22f9496e5 100644 --- a/hathor/cli/tx_generator.py +++ b/hathor/cli/tx_generator.py @@ -19,7 +19,7 @@ import time from argparse import ArgumentParser, Namespace from json.decoder import JSONDecodeError -from typing import Any, Dict +from typing import Any import requests @@ -116,7 +116,7 @@ def signal_handler(sig, frame): value = random.randint(10, 100) # print('Sending {} tokens to {}...'.format(address, value)) - data: Dict[str, Any] = {'outputs': [{'address': address, 'value': value}], 'inputs': []} + data: dict[str, Any] = {'outputs': [{'address': address, 'value': value}], 'inputs': []} if args.timestamp: if args.timestamp == 'server': diff --git a/hathor/cli/util.py b/hathor/cli/util.py index 885322595..5a5244422 100644 --- a/hathor/cli/util.py +++ b/hathor/cli/util.py @@ -16,7 +16,7 @@ from argparse import ArgumentParser from collections import OrderedDict from datetime import datetime -from typing import Any, List +from typing import Any import configargparse import structlog @@ -215,7 +215,7 @@ def kwargs_formatter(_, __, event_dict): event_dict['event'] = event_dict['event'].format(**event_dict) return event_dict - processors: List[Any] = [ + processors: list[Any] = [ structlog.stdlib.filter_by_level, structlog.stdlib.add_logger_name, structlog.stdlib.add_log_level, diff --git a/hathor/client.py b/hathor/client.py index 737f7d3e9..4f7ab4475 100644 --- a/hathor/client.py +++ b/hathor/client.py @@ -19,7 +19,7 @@ import random import string from abc import ABC, abstractmethod -from typing import Any, AsyncIterator, Dict, List, Optional, Set, Tuple, Union +from typing import Any, AsyncIterator, Optional, Union from urllib.parse import urljoin from aiohttp import ClientSession, ClientWebSocketResponse @@ -42,7 +42,7 @@ class 
APIError(HathorError): class JsonRpcError(HathorError): - def __init__(self, code: int, message: Optional[str] = None, data: Optional[Dict] = None): + def __init__(self, code: int, message: Optional[str] = None, data: Optional[dict] = None): self.code = code self.message = message self.data = data @@ -61,12 +61,12 @@ class IHathorClient(ABC): """ @abstractmethod - async def version(self) -> Tuple[int, int, int]: + async def version(self) -> tuple[int, int, int]: """Get the parsed version returned from `/v1a/version`, a tuple with (major, minor, patch)""" raise NotImplementedError @abstractmethod - async def status(self) -> Dict[str, Any]: + async def status(self) -> dict[str, Any]: """Get the parsed dict returned from `/v1a/status`, format described in `hathor.p2p.resources.status`""" raise NotImplementedError @@ -124,14 +124,14 @@ async def stop(self) -> None: def _get_url(self, url: str) -> str: return urljoin(self._base_url, url.lstrip('/')) - async def version(self) -> Tuple[int, int, int]: + async def version(self) -> tuple[int, int, int]: async with self.session.get(self._get_url('version')) as resp: data = await resp.json() ver = data['version'] major, minor, patch = ver.split('.') return (int(major), int(minor), int(patch)) - async def status(self) -> Dict[str, Any]: + async def status(self) -> dict[str, Any]: async with self.session.get(self._get_url('status')) as resp: return await resp.json() @@ -141,7 +141,7 @@ async def get_block_template(self, address: Optional[str] = None, merged_mining: params: MultiDict[Any] = MultiDict() if address is not None: params.add('address', address) - caps: Set[Capabilities] = set() + caps: set[Capabilities] = set() if merged_mining: caps.add(Capabilities.MERGED_MINING) if caps: @@ -170,7 +170,7 @@ async def mining(self) -> 'MiningChannel': class MiningChannel(IMiningChannel): _ws: ClientWebSocketResponse - _requests: Dict[str, asyncio.Future] + _requests: dict[str, asyncio.Future] _queue: asyncio.Future _task: 
asyncio.Task @@ -206,7 +206,7 @@ async def __task(self) -> None: else: self._handle_response(data) - def _handle_request(self, data: Dict) -> None: + def _handle_request(self, data: dict) -> None: # only request accepted is a 'mining.notify' notification if data['method'] != 'mining.notify': self.log.warn('unknown method received', data=data) @@ -216,7 +216,7 @@ def _handle_request(self, data: Dict) -> None: self._queue = self.loop.create_future() self._queue.set_result(block_templates) - def _handle_response(self, data: Dict) -> None: + def _handle_response(self, data: dict) -> None: _id = data.get('id') id: Optional[str] = str(_id) if _id else None error = data.get('error') @@ -248,18 +248,18 @@ async def close(self) -> None: await self._ws.close() async def submit(self, block: Block) -> Optional[BlockTemplate]: - resp: Union[bool, Dict] = await self._do_request('mining.submit', { + resp: Union[bool, dict] = await self._do_request('mining.submit', { 'hexdata': bytes(block).hex(), }) if resp: - assert isinstance(resp, Dict) + assert isinstance(resp, dict) error = resp.get('error') if error: raise APIError(error) return BlockTemplate.from_dict(resp['result']) return None - async def _do_request(self, method: str, params: Union[Dict, List]) -> Any: + async def _do_request(self, method: str, params: Union[dict, list]) -> Any: while True: id = ''.join(random.choices(string.printable, k=10)) if id not in self._requests: @@ -284,12 +284,12 @@ class HathorClientStub(IHathorClient): def __init__(self, manager: HathorManager): self.manager = manager - async def version(self) -> Tuple[int, int, int]: + async def version(self) -> tuple[int, int, int]: from hathor.version import __version__ major, minor, patch = __version__.split('.') return (int(major), int(minor), int(patch)) - async def status(self) -> Dict[str, Any]: + async def status(self) -> dict[str, Any]: return {} async def get_block_template(self, address: Optional[str] = None, merged_mining: bool = False) -> 
Block: @@ -344,7 +344,7 @@ async def submit(self, block: Block) -> Optional[BlockTemplate]: return None -def create_tx_from_dict(data: Dict[str, Any], update_hash: bool = False, +def create_tx_from_dict(data: dict[str, Any], update_hash: bool = False, storage: Optional[TransactionStorage] = None) -> BaseTransaction: import base64 diff --git a/hathor/conf/settings.py b/hathor/conf/settings.py index 7aaa14322..fb55819fb 100644 --- a/hathor/conf/settings.py +++ b/hathor/conf/settings.py @@ -413,7 +413,7 @@ def _parse_checkpoints(checkpoints: Union[dict[int, str], list[Checkpoint]]) -> ] if not isinstance(checkpoints, list): - raise TypeError(f'expected \'Dict[int, str]\' or \'List[Checkpoint]\', got {checkpoints}') + raise TypeError(f'expected \'dict[int, str]\' or \'list[Checkpoint]\', got {checkpoints}') return checkpoints diff --git a/hathor/consensus/block_consensus.py b/hathor/consensus/block_consensus.py index a16810829..4a1d39139 100644 --- a/hathor/consensus/block_consensus.py +++ b/hathor/consensus/block_consensus.py @@ -13,7 +13,7 @@ # limitations under the License. from itertools import chain -from typing import TYPE_CHECKING, Iterable, List, Optional, Set, cast +from typing import TYPE_CHECKING, Iterable, Optional, cast from structlog import get_logger @@ -113,7 +113,7 @@ def update_voided_info(self, block: Block) -> None: assert storage.indexes is not None # Union of voided_by of parents - voided_by: Set[bytes] = self.union_voided_by_from_parents(block) + voided_by: set[bytes] = self.union_voided_by_from_parents(block) # Update accumulated weight of the transactions voiding us. assert block.hash not in voided_by @@ -219,14 +219,14 @@ def update_voided_info(self, block: Block) -> None: if not meta.voided_by: self.context.mark_as_reorg(common_block) - def union_voided_by_from_parents(self, block: Block) -> Set[bytes]: + def union_voided_by_from_parents(self, block: Block) -> set[bytes]: """Return the union of the voided_by of block's parents. 
It does not include the hash of blocks because the hash of blocks are not propagated through the chains. For further information, see the docstring of the ConsensusAlgorithm class. """ - voided_by: Set[bytes] = set() + voided_by: set[bytes] = set() for parent in block.get_parents(): assert parent.hash is not None parent_meta = parent.get_metadata() @@ -247,7 +247,7 @@ def update_voided_by_from_parents(self, block: Block) -> bool: """Update block's metadata voided_by from parents. Return True if the block is voided and False otherwise.""" assert block.storage is not None - voided_by: Set[bytes] = self.union_voided_by_from_parents(block) + voided_by: set[bytes] = self.union_voided_by_from_parents(block) if voided_by: meta = block.get_metadata() if meta.voided_by: @@ -259,7 +259,7 @@ def update_voided_by_from_parents(self, block: Block) -> bool: return True return False - def add_voided_by_to_multiple_chains(self, block: Block, heads: List[Block], first_block: Block) -> None: + def add_voided_by_to_multiple_chains(self, block: Block, heads: list[Block], first_block: Block) -> None: # We need to go through all side chains because there may be non-voided blocks # that must be voided. # For instance, imagine two chains with intersection with both heads voided. 
@@ -290,7 +290,7 @@ def update_score_and_mark_as_the_best_chain_if_possible(self, block: Block) -> N storage = block.storage heads = [cast(Block, storage.get_transaction(h)) for h in storage.get_best_block_tips()] best_score = 0.0 - best_heads: List[Block] + best_heads: list[Block] for head in heads: head_meta = head.get_metadata(force_reload=True) if head_meta.score <= best_score - settings.WEIGHT_TOL: @@ -446,7 +446,7 @@ def remove_first_block_markers(self, block: Block) -> None: meta.first_block = None self.context.save(tx) - def _score_block_dfs(self, block: BaseTransaction, used: Set[bytes], + def _score_block_dfs(self, block: BaseTransaction, used: set[bytes], mark_as_best_chain: bool, newest_timestamp: int) -> float: """ Internal method to run a DFS. It is used by `calculate_score()`. """ @@ -526,7 +526,7 @@ def calculate_score(self, block: Block, *, mark_as_best_chain: bool = False) -> parent = self._find_first_parent_in_best_chain(block) newest_timestamp = parent.timestamp - used: Set[bytes] = set() + used: set[bytes] = set() return self._score_block_dfs(block, used, mark_as_best_chain, newest_timestamp) diff --git a/hathor/consensus/transaction_consensus.py b/hathor/consensus/transaction_consensus.py index 882c2936a..dd0e97808 100644 --- a/hathor/consensus/transaction_consensus.py +++ b/hathor/consensus/transaction_consensus.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import TYPE_CHECKING, Iterable, List, Set, cast +from typing import TYPE_CHECKING, Iterable, cast from structlog import get_logger @@ -173,7 +173,7 @@ def update_voided_info(self, tx: Transaction) -> None: assert tx.hash is not None assert tx.storage is not None - voided_by: Set[bytes] = set() + voided_by: set[bytes] = set() # Union of voided_by of parents for parent in tx.get_parents(): @@ -271,8 +271,8 @@ def check_conflicts(self, tx: Transaction) -> None: return # Filter the possible candidates to compare to tx. - candidates: List[Transaction] = [] - conflict_list: List[Transaction] = [] + candidates: list[Transaction] = [] + conflict_list: list[Transaction] = [] for h in meta.conflict_with or []: conflict_tx = cast(Transaction, tx.storage.get_transaction(h)) conflict_list.append(conflict_tx) @@ -348,7 +348,7 @@ def remove_voided_by(self, tx: Transaction, voided_hash: bytes) -> bool: self.log.debug('remove_voided_by', tx=tx.hash_hex, voided_hash=voided_hash.hex()) bfs = BFSWalk(tx.storage, is_dag_funds=True, is_dag_verifications=True, is_left_to_right=True) - check_list: List[BaseTransaction] = [] + check_list: list[BaseTransaction] = [] for tx2 in bfs.run(tx, skip_root=False): assert tx2.storage is not None @@ -406,7 +406,7 @@ def add_voided_by(self, tx: Transaction, voided_hash: bytes) -> bool: from hathor.transaction.storage.traversal import BFSWalk bfs = BFSWalk(tx.storage, is_dag_funds=True, is_dag_verifications=is_dag_verifications, is_left_to_right=True) - check_list: List[Transaction] = [] + check_list: list[Transaction] = [] for tx2 in bfs.run(tx, skip_root=False): assert tx2.storage is not None assert tx2.hash is not None diff --git a/hathor/daa.py b/hathor/daa.py index c2bb68a4c..99ff40dde 100644 --- a/hathor/daa.py +++ b/hathor/daa.py @@ -21,7 +21,7 @@ from enum import IntFlag from math import log -from typing import TYPE_CHECKING, List +from typing import TYPE_CHECKING from structlog import get_logger @@ -90,7 +90,7 @@ def 
calculate_next_weight(parent_block: 'Block', timestamp: int) -> float: if N < 10: return MIN_BLOCK_WEIGHT - blocks: List['Block'] = [] + blocks: list['Block'] = [] while len(blocks) < N + 1: blocks.append(root) root = root.get_block_parent() diff --git a/hathor/debug_resources.py b/hathor/debug_resources.py index 080dc6950..1641dec16 100644 --- a/hathor/debug_resources.py +++ b/hathor/debug_resources.py @@ -14,7 +14,6 @@ import os import sys -from typing import Type from structlog import get_logger from twisted.internet import defer @@ -53,7 +52,7 @@ class DebugRaiseResource(Resource): } default_msg = 'exception raised for debugging purposes' - def run(self, exc_cls: Type[BaseException], msg: str) -> None: + def run(self, exc_cls: type[BaseException], msg: str) -> None: raise exc_cls(msg) def render_GET(self, request: Request) -> bytes: @@ -79,7 +78,7 @@ class DebugRejectResource(DebugRaiseResource): } default_msg = 'deferred rejected for debugging purposes' - def run(self, exc_cls: Type[BaseException], msg: str) -> None: + def run(self, exc_cls: type[BaseException], msg: str) -> None: deferred: defer.Deferred[None] = defer.Deferred() deferred.errback(exc_cls(msg)) diff --git a/hathor/event/model/event_data.py b/hathor/event/model/event_data.py index f5c341593..7ab905473 100644 --- a/hathor/event/model/event_data.py +++ b/hathor/event/model/event_data.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import List, Optional, Union, cast +from typing import Optional, Union, cast from pydantic import Extra, validator @@ -34,17 +34,17 @@ class TxOutput(BaseModel): class SpentOutput(BaseModel): index: int - tx_ids: List[str] + tx_ids: list[str] class TxMetadata(BaseModel, extra=Extra.ignore): hash: str - spent_outputs: List[SpentOutput] - conflict_with: List[str] - voided_by: List[str] - received_by: List[int] - children: List[str] - twins: List[str] + spent_outputs: list[SpentOutput] + conflict_with: list[str] + voided_by: list[str] + received_by: list[int] + children: list[str] + twins: list[str] accumulated_weight: float score: float first_block: Optional[str] @@ -52,7 +52,7 @@ class TxMetadata(BaseModel, extra=Extra.ignore): validation: str @validator('spent_outputs', pre=True, each_item=True) - def _parse_spent_outputs(cls, spent_output: Union[SpentOutput, List[Union[int, List[str]]]]) -> SpentOutput: + def _parse_spent_outputs(cls, spent_output: Union[SpentOutput, list[Union[int, list[str]]]]) -> SpentOutput: """ This validator method is called by pydantic when parsing models, and is not supposed to be called directly. 
It either returns a SpentOutput if it receives one, or tries to parse it as a list (as returned from @@ -70,7 +70,7 @@ def _parse_spent_outputs(cls, spent_output: Union[SpentOutput, List[Union[int, L return SpentOutput( index=cast(int, index), - tx_ids=cast(List[str], tx_ids) + tx_ids=cast(list[str], tx_ids) ) @@ -92,10 +92,10 @@ class TxData(BaseEventData, extra=Extra.ignore): timestamp: int version: int weight: float - inputs: List['TxInput'] - outputs: List['TxOutput'] - parents: List[str] - tokens: List[str] + inputs: list['TxInput'] + outputs: list['TxOutput'] + parents: list[str] + tokens: list[str] # TODO: Token name and symbol could be in a different class because they're only used by TokenCreationTransaction token_name: Optional[str] token_symbol: Optional[str] diff --git a/hathor/event/model/event_type.py b/hathor/event/model/event_type.py index ff3df2f62..d6f2bc8c4 100644 --- a/hathor/event/model/event_type.py +++ b/hathor/event/model/event_type.py @@ -13,7 +13,6 @@ # limitations under the License. 
from enum import Enum -from typing import Dict, Type from hathor.event.model.event_data import BaseEventData, EmptyData, ReorgData, TxData from hathor.pubsub import HathorEvents @@ -35,7 +34,7 @@ def from_hathor_event(cls, hathor_event: HathorEvents) -> 'EventType': return event - def data_type(self) -> Type[BaseEventData]: + def data_type(self) -> type[BaseEventData]: return _EVENT_TYPE_TO_EVENT_DATA[self] @@ -48,7 +47,7 @@ def data_type(self) -> Type[BaseEventData]: HathorEvents.CONSENSUS_TX_UPDATE: EventType.VERTEX_METADATA_CHANGED } -_EVENT_TYPE_TO_EVENT_DATA: Dict[EventType, Type[BaseEventData]] = { +_EVENT_TYPE_TO_EVENT_DATA: dict[EventType, type[BaseEventData]] = { EventType.LOAD_STARTED: EmptyData, EventType.LOAD_FINISHED: EmptyData, EventType.NEW_VERTEX_ACCEPTED: TxData, diff --git a/hathor/event/resources/event.py b/hathor/event/resources/event.py index cde4e6740..b3b7dac38 100644 --- a/hathor/event/resources/event.py +++ b/hathor/event/resources/event.py @@ -13,7 +13,7 @@ # limitations under the License. from itertools import islice -from typing import List, Optional +from typing import Optional from pydantic import Field, NonNegativeInt @@ -70,7 +70,7 @@ class GetEventsParams(QueryParams): class GetEventsResponse(Response): - events: List[BaseEvent] + events: list[BaseEvent] latest_event_id: Optional[int] diff --git a/hathor/event/storage/memory_storage.py b/hathor/event/storage/memory_storage.py index b790bbe1f..0603b2826 100644 --- a/hathor/event/storage/memory_storage.py +++ b/hathor/event/storage/memory_storage.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import Iterator, List, Optional +from typing import Iterator, Optional from hathor.event.model.base_event import BaseEvent from hathor.event.model.node_state import NodeState @@ -21,7 +21,7 @@ class EventMemoryStorage(EventStorage): def __init__(self) -> None: - self._events: List[BaseEvent] = [] + self._events: list[BaseEvent] = [] self._last_event: Optional[BaseEvent] = None self._last_group_id: Optional[int] = None self._node_state: Optional[NodeState] = None diff --git a/hathor/event/websocket/protocol.py b/hathor/event/websocket/protocol.py index 26c70891b..5a5906ec2 100644 --- a/hathor/event/websocket/protocol.py +++ b/hathor/event/websocket/protocol.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import TYPE_CHECKING, Callable, Dict, Optional, Type +from typing import TYPE_CHECKING, Callable, Optional from autobahn.exception import Disconnected from autobahn.twisted.websocket import WebSocketServerProtocol @@ -91,7 +91,7 @@ def onMessage(self, payload: bytes, isBinary: bool) -> None: def _handle_request(self, request: Request) -> None: # This could be a pattern match in Python 3.10 request_type = type(request) - handlers: Dict[Type, Callable] = { + handlers: dict[type, Callable] = { StartStreamRequest: self._handle_start_stream_request, AckRequest: self._handle_ack_request, StopStreamRequest: lambda _: self._handle_stop_stream_request() diff --git a/hathor/graphviz.py b/hathor/graphviz.py index a62fcf210..f0abe04fe 100644 --- a/hathor/graphviz.py +++ b/hathor/graphviz.py @@ -14,7 +14,7 @@ from itertools import chain -from typing import Dict, Iterator, Set +from typing import Iterator from graphviz import Digraph @@ -55,11 +55,11 @@ def __init__(self, storage: TransactionStorage, include_funds: bool = False, self.not_fully_validated_attrs = dict(style='dashed,filled', penwidth='0.25', fillcolor='#F9FFAB') # Labels - self.labels: Dict[bytes, str] = {} + 
self.labels: dict[bytes, str] = {} # Internals - self._blocks_set: Set[bytes] = set() - self._txs_set: Set[bytes] = set() + self._blocks_set: set[bytes] = set() + self._txs_set: set[bytes] = set() def get_node_label(self, tx: BaseTransaction) -> str: """ Return the node's label for tx. @@ -77,7 +77,7 @@ def get_node_label(self, tx: BaseTransaction) -> str: parts.append('a: {:.2f}'.format(meta.accumulated_weight)) return '\n'.join(parts) - def get_node_attrs(self, tx: BaseTransaction) -> Dict[str, str]: + def get_node_attrs(self, tx: BaseTransaction) -> dict[str, str]: """ Return node's attributes. """ assert tx.hash is not None @@ -102,7 +102,7 @@ def get_node_attrs(self, tx: BaseTransaction) -> Dict[str, str]: return node_attrs - def get_edge_attrs(self, tx: BaseTransaction, neighbor_hash: bytes) -> Dict[str, str]: + def get_edge_attrs(self, tx: BaseTransaction, neighbor_hash: bytes) -> dict[str, str]: """ Return edge's attributes. """ edge_attrs = {} @@ -112,12 +112,12 @@ def get_edge_attrs(self, tx: BaseTransaction, neighbor_hash: bytes) -> Dict[str, edge_attrs.update(dict(penwidth='1')) return edge_attrs - def get_parent_edge_attrs(self, tx: BaseTransaction, neighbor_hash: bytes) -> Dict[str, str]: + def get_parent_edge_attrs(self, tx: BaseTransaction, neighbor_hash: bytes) -> dict[str, str]: """ Return edge's attributes for a verification edge. """ return self.get_edge_attrs(tx, neighbor_hash) - def get_input_edge_attrs(self, tx: BaseTransaction, neighbor_hash: bytes) -> Dict[str, str]: + def get_input_edge_attrs(self, tx: BaseTransaction, neighbor_hash: bytes) -> dict[str, str]: """ Return edge's attributes for a fund edge. 
""" edge_attrs = self.get_edge_attrs(tx, neighbor_hash) @@ -139,8 +139,8 @@ def dot(self, format: str = 'pdf') -> Digraph: dot = Digraph(format=format) dot.attr('node', shape='oval', style='') - self._blocks_set = set() # Set[bytes(hash)] - self._txs_set = set() # Set[bytes(hash)] + self._blocks_set = set() # set[bytes(hash)] + self._txs_set = set() # set[bytes(hash)] g_blocks = dot.subgraph(name='blocks') g_txs = dot.subgraph(name='txs') diff --git a/hathor/indexes/address_index.py b/hathor/indexes/address_index.py index 73c8da1ef..9711e985f 100644 --- a/hathor/indexes/address_index.py +++ b/hathor/indexes/address_index.py @@ -13,7 +13,7 @@ # limitations under the License. from abc import abstractmethod -from typing import TYPE_CHECKING, Iterable, List, Optional +from typing import TYPE_CHECKING, Iterable, Optional from structlog import get_logger @@ -86,13 +86,13 @@ def remove_tx(self, tx: BaseTransaction) -> None: raise NotImplementedError @abstractmethod - def get_from_address(self, address: str) -> List[bytes]: + def get_from_address(self, address: str) -> list[bytes]: """ Get list of transaction hashes of an address """ raise NotImplementedError @abstractmethod - def get_sorted_from_address(self, address: str) -> List[bytes]: + def get_sorted_from_address(self, address: str) -> list[bytes]: """ Get a sorted list of transaction hashes of an address """ raise NotImplementedError diff --git a/hathor/indexes/deps_index.py b/hathor/indexes/deps_index.py index 81362deac..8990b11dc 100644 --- a/hathor/indexes/deps_index.py +++ b/hathor/indexes/deps_index.py @@ -13,7 +13,7 @@ # limitations under the License. 
from abc import abstractmethod -from typing import TYPE_CHECKING, Iterator, List +from typing import TYPE_CHECKING, Iterator from hathor.indexes.base_index import BaseIndex from hathor.indexes.scope import Scope @@ -167,7 +167,7 @@ def _iter_needed_txs(self) -> Iterator[bytes]: raise NotImplementedError @abstractmethod - def known_children(self, tx: BaseTransaction) -> List[bytes]: + def known_children(self, tx: BaseTransaction) -> list[bytes]: """Return the hashes of all reverse dependencies that are children of the given tx. That is, they depend on `tx` because they are children of `tx`, and not because `tx` is an input. This is diff --git a/hathor/indexes/height_index.py b/hathor/indexes/height_index.py index 2a62cfc2c..4ac6715db 100644 --- a/hathor/indexes/height_index.py +++ b/hathor/indexes/height_index.py @@ -13,7 +13,7 @@ # limitations under the License. from abc import abstractmethod -from typing import List, NamedTuple, Optional, Tuple +from typing import NamedTuple, Optional from hathor.indexes.base_index import BaseIndex from hathor.indexes.scope import Scope @@ -85,7 +85,7 @@ def get_tip(self) -> bytes: raise NotImplementedError @abstractmethod - def get_height_tip(self) -> Tuple[int, bytes]: + def get_height_tip(self) -> tuple[int, bytes]: """ Return the best block height and hash, it returns the genesis when there is no other block """ raise NotImplementedError @@ -98,7 +98,7 @@ def update_new_chain(self, height: int, block: Block) -> None: block_height = height side_chain_block = block - add_to_index: List[_AddToIndexItem] = [] + add_to_index: list[_AddToIndexItem] = [] while self.get(block_height) != side_chain_block.hash: add_to_index.append( _AddToIndexItem(block_height, not_none(side_chain_block.hash), side_chain_block.timestamp) diff --git a/hathor/indexes/manager.py b/hathor/indexes/manager.py index 43c3552dc..d6110a054 100644 --- a/hathor/indexes/manager.py +++ b/hathor/indexes/manager.py @@ -15,7 +15,7 @@ import operator from abc import ABC, 
abstractmethod from functools import reduce -from typing import TYPE_CHECKING, Iterator, List, Optional +from typing import TYPE_CHECKING, Iterator, Optional from structlog import get_logger @@ -136,7 +136,7 @@ def _manually_initialize(self, tx_storage: 'TransactionStorage') -> None: db_last_started_at = tx_storage.get_last_started_at() - indexes_to_init: List[BaseIndex] = [] + indexes_to_init: list[BaseIndex] = [] for index in self.iter_all_indexes(): index_db_name = index.get_db_name() if index_db_name is None: diff --git a/hathor/indexes/memory_address_index.py b/hathor/indexes/memory_address_index.py index 3ab6b14f8..25588e594 100644 --- a/hathor/indexes/memory_address_index.py +++ b/hathor/indexes/memory_address_index.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import TYPE_CHECKING, Iterable, List, Optional +from typing import TYPE_CHECKING, Iterable, Optional from structlog import get_logger @@ -46,10 +46,10 @@ def add_tx(self, tx: BaseTransaction) -> None: super().add_tx(tx) self._publish_tx(tx) - def get_from_address(self, address: str) -> List[bytes]: + def get_from_address(self, address: str) -> list[bytes]: return list(self._get_from_key(address)) - def get_sorted_from_address(self, address: str) -> List[bytes]: + def get_sorted_from_address(self, address: str) -> list[bytes]: return list(self._get_sorted_from_key(address)) def is_address_empty(self, address: str) -> bool: diff --git a/hathor/indexes/memory_deps_index.py b/hathor/indexes/memory_deps_index.py index 2c9d77eda..b596ef98a 100644 --- a/hathor/indexes/memory_deps_index.py +++ b/hathor/indexes/memory_deps_index.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import TYPE_CHECKING, Dict, FrozenSet, Iterator, List, Optional, Set, Tuple +from typing import TYPE_CHECKING, Iterator, Optional from structlog import get_logger @@ -28,13 +28,13 @@ class MemoryDepsIndex(DepsIndex): # Reverse dependency mapping - _rev_dep_index: Dict[bytes, Set[bytes]] + _rev_dep_index: dict[bytes, set[bytes]] # Ready to be validated cache - _txs_with_deps_ready: Set[bytes] + _txs_with_deps_ready: set[bytes] # Next to be downloaded - _needed_txs_index: Dict[bytes, Tuple[int, bytes]] + _needed_txs_index: dict[bytes, tuple[int, bytes]] def __init__(self): self.log = logger.new() @@ -106,11 +106,11 @@ def iter(self) -> Iterator[bytes]: def _iter_needed_txs(self) -> Iterator[bytes]: yield from self._needed_txs_index.keys() - def _get_rev_deps(self, tx: bytes) -> FrozenSet[bytes]: + def _get_rev_deps(self, tx: bytes) -> frozenset[bytes]: """Get all txs that depend on the given tx (i.e. its reverse depdendencies).""" return frozenset(self._rev_dep_index.get(tx, set())) - def known_children(self, tx: BaseTransaction) -> List[bytes]: + def known_children(self, tx: BaseTransaction) -> list[bytes]: assert tx.hash is not None assert tx.storage is not None it_rev_deps = map(tx.storage.get_transaction, self._get_rev_deps(tx.hash)) diff --git a/hathor/indexes/memory_height_index.py b/hathor/indexes/memory_height_index.py index 5bdb62a25..7040ce10d 100644 --- a/hathor/indexes/memory_height_index.py +++ b/hathor/indexes/memory_height_index.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import List, Optional, Tuple +from typing import Optional from hathor.indexes.height_index import BLOCK_GENESIS_ENTRY, HeightIndex, IndexEntry @@ -21,7 +21,7 @@ class MemoryHeightIndex(HeightIndex): """Store the block hash for each given height """ - _index: List[IndexEntry] + _index: list[IndexEntry] def __init__(self) -> None: super().__init__() @@ -68,6 +68,6 @@ def get(self, height: int) -> Optional[bytes]: def get_tip(self) -> bytes: return self._index[-1].hash - def get_height_tip(self) -> Tuple[int, bytes]: + def get_height_tip(self) -> tuple[int, bytes]: height = len(self._index) - 1 return height, self._index[height].hash diff --git a/hathor/indexes/memory_timestamp_index.py b/hathor/indexes/memory_timestamp_index.py index 523e1bb3e..f041f6296 100644 --- a/hathor/indexes/memory_timestamp_index.py +++ b/hathor/indexes/memory_timestamp_index.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Iterator, List, Optional, Tuple +from typing import Iterator, Optional from sortedcontainers import SortedKeyList from structlog import get_logger @@ -61,16 +61,16 @@ def del_tx(self, tx: BaseTransaction) -> None: if idx < len(self._index) and self._index[idx].hash == tx.hash: self._index.pop(idx) - def get_newest(self, count: int) -> Tuple[List[bytes], bool]: + def get_newest(self, count: int) -> tuple[list[bytes], bool]: return get_newest_sorted_key_list(self._index, count) - def get_older(self, timestamp: int, hash_bytes: bytes, count: int) -> Tuple[List[bytes], bool]: + def get_older(self, timestamp: int, hash_bytes: bytes, count: int) -> tuple[list[bytes], bool]: return get_older_sorted_key_list(self._index, timestamp, hash_bytes, count) - def get_newer(self, timestamp: int, hash_bytes: bytes, count: int) -> Tuple[List[bytes], bool]: + def get_newer(self, timestamp: int, hash_bytes: bytes, count: int) -> tuple[list[bytes], bool]: return 
get_newer_sorted_key_list(self._index, timestamp, hash_bytes, count) - def get_hashes_and_next_idx(self, from_idx: RangeIdx, count: int) -> Tuple[List[bytes], Optional[RangeIdx]]: + def get_hashes_and_next_idx(self, from_idx: RangeIdx, count: int) -> tuple[list[bytes], Optional[RangeIdx]]: timestamp, offset = from_idx idx = self._index.bisect_key_left((timestamp, b'')) txs = SortedKeyList(key=lambda x: (x.timestamp, x.hash)) diff --git a/hathor/indexes/memory_tips_index.py b/hathor/indexes/memory_tips_index.py index b8b8c6310..47d8c0eca 100644 --- a/hathor/indexes/memory_tips_index.py +++ b/hathor/indexes/memory_tips_index.py @@ -13,7 +13,7 @@ # limitations under the License. from math import inf -from typing import Dict, Optional, Set +from typing import Optional from intervaltree import Interval, IntervalTree from structlog import get_logger @@ -45,7 +45,7 @@ class MemoryTipsIndex(TipsIndex): # It is a way to access the interval by the hash of the transaction. # It is useful because the interval tree allows access only by the interval. - tx_last_interval: Dict[bytes, Interval] + tx_last_interval: dict[bytes, Interval] def __init__(self, *, scope_type: ScopeType): super().__init__(scope_type=scope_type) @@ -155,5 +155,5 @@ def update_tx(self, tx: BaseTransaction, *, relax_assert: bool = False) -> None: new_interval = Interval(pi.begin, min_timestamp, pi.data) self._add_interval(new_interval) - def __getitem__(self, index: float) -> Set[Interval]: + def __getitem__(self, index: float) -> set[Interval]: return self.tree[index] diff --git a/hathor/indexes/memory_tokens_index.py b/hathor/indexes/memory_tokens_index.py index 9ac0697db..8b001dc51 100644 --- a/hathor/indexes/memory_tokens_index.py +++ b/hathor/indexes/memory_tokens_index.py @@ -13,7 +13,7 @@ # limitations under the License. 
from collections import defaultdict -from typing import Dict, Iterator, List, Optional, Set, Tuple, cast +from typing import Iterator, Optional, cast from sortedcontainers import SortedKeyList from structlog import get_logger @@ -36,12 +36,12 @@ class MemoryTokenIndexInfo(TokenIndexInfo): _name: Optional[str] _symbol: Optional[str] _total: int - _mint: Set[TokenUtxoInfo] - _melt: Set[TokenUtxoInfo] + _mint: set[TokenUtxoInfo] + _melt: set[TokenUtxoInfo] _transactions: 'SortedKeyList[TransactionIndexElement]' def __init__(self, name: Optional[str] = None, symbol: Optional[str] = None, total: int = 0, - mint: Optional[Set[TokenUtxoInfo]] = None, melt: Optional[Set[TokenUtxoInfo]] = None) -> None: + mint: Optional[set[TokenUtxoInfo]] = None, melt: Optional[set[TokenUtxoInfo]] = None) -> None: self._name = name self._symbol = symbol self._total = total @@ -75,7 +75,7 @@ def get_db_name(self) -> Optional[str]: return None def force_clear(self) -> None: - self._tokens: Dict[bytes, MemoryTokenIndexInfo] = defaultdict(MemoryTokenIndexInfo) + self._tokens: dict[bytes, MemoryTokenIndexInfo] = defaultdict(MemoryTokenIndexInfo) def _add_to_index(self, tx: BaseTransaction, index: int) -> None: """ Add tx to mint/melt indexes and total amount @@ -165,7 +165,7 @@ def del_tx(self, tx: BaseTransaction) -> None: assert tx.hash is not None del self._tokens[tx.hash] - def iter_all_tokens(self) -> Iterator[Tuple[bytes, TokenIndexInfo]]: + def iter_all_tokens(self) -> Iterator[tuple[bytes, TokenIndexInfo]]: yield from self._tokens.items() def get_token_info(self, token_uid: bytes) -> TokenIndexInfo: @@ -182,7 +182,7 @@ def get_transactions_count(self, token_uid: bytes) -> int: info = self._tokens[token_uid] return len(info._transactions) - def get_newest_transactions(self, token_uid: bytes, count: int) -> Tuple[List[bytes], bool]: + def get_newest_transactions(self, token_uid: bytes, count: int) -> tuple[list[bytes], bool]: assert is_token_uid_valid(token_uid) if token_uid not in 
self._tokens: return [], False @@ -190,7 +190,7 @@ def get_newest_transactions(self, token_uid: bytes, count: int) -> Tuple[List[by return get_newest_sorted_key_list(transactions, count) def get_older_transactions(self, token_uid: bytes, timestamp: int, hash_bytes: bytes, count: int - ) -> Tuple[List[bytes], bool]: + ) -> tuple[list[bytes], bool]: assert is_token_uid_valid(token_uid) if token_uid not in self._tokens: return [], False @@ -198,7 +198,7 @@ def get_older_transactions(self, token_uid: bytes, timestamp: int, hash_bytes: b return get_older_sorted_key_list(transactions, timestamp, hash_bytes, count) def get_newer_transactions(self, token_uid: bytes, timestamp: int, hash_bytes: bytes, count: int - ) -> Tuple[List[bytes], bool]: + ) -> tuple[list[bytes], bool]: assert is_token_uid_valid(token_uid) if token_uid not in self._tokens: return [], False diff --git a/hathor/indexes/memory_tx_group_index.py b/hathor/indexes/memory_tx_group_index.py index 752e04762..39c0f9470 100644 --- a/hathor/indexes/memory_tx_group_index.py +++ b/hathor/indexes/memory_tx_group_index.py @@ -14,7 +14,7 @@ from abc import abstractmethod from collections import defaultdict -from typing import DefaultDict, Iterable, Set, Sized, TypeVar +from typing import Iterable, Set, Sized, TypeVar from structlog import get_logger @@ -31,7 +31,7 @@ class MemoryTxGroupIndex(TxGroupIndex[KT]): """Memory implementation of the TxGroupIndex. This class is abstract and cannot be used directly. 
""" - index: DefaultDict[KT, Set[bytes]] + index: defaultdict[KT, Set[bytes]] def __init__(self) -> None: self.force_clear() diff --git a/hathor/indexes/memory_utxo_index.py b/hathor/indexes/memory_utxo_index.py index cf09edc9f..ff1872800 100644 --- a/hathor/indexes/memory_utxo_index.py +++ b/hathor/indexes/memory_utxo_index.py @@ -14,7 +14,7 @@ from collections import defaultdict from dataclasses import dataclass, field -from typing import DefaultDict, Iterator, NamedTuple, Optional, Union +from typing import Iterator, NamedTuple, Optional, Union from sortedcontainers import SortedSet from structlog import get_logger @@ -60,7 +60,7 @@ class _IndexItem: class MemoryUtxoIndex(UtxoIndex): - _index: DefaultDict[_IndexKey, _IndexItem] + _index: defaultdict[_IndexKey, _IndexItem] def __init__(self): super().__init__() diff --git a/hathor/indexes/partial_rocksdb_tips_index.py b/hathor/indexes/partial_rocksdb_tips_index.py index 8757705b1..4a0d83c6d 100644 --- a/hathor/indexes/partial_rocksdb_tips_index.py +++ b/hathor/indexes/partial_rocksdb_tips_index.py @@ -13,7 +13,7 @@ # limitations under the License. import math -from typing import TYPE_CHECKING, Dict, Iterator, Optional, Union +from typing import TYPE_CHECKING, Iterator, Optional, Union from intervaltree import Interval, IntervalTree from structlog import get_logger @@ -59,7 +59,7 @@ class PartialRocksDBTipsIndex(MemoryTipsIndex, RocksDBIndexUtils): # It is a way to access the interval by the hash of the transaction. # It is useful because the interval tree allows access only by the interval. 
- tx_last_interval: Dict[bytes, Interval] + tx_last_interval: dict[bytes, Interval] def __init__(self, db: 'rocksdb.DB', *, scope_type: ScopeType): MemoryTipsIndex.__init__(self, scope_type=scope_type) diff --git a/hathor/indexes/rocksdb_address_index.py b/hathor/indexes/rocksdb_address_index.py index 74f978fa7..f9f1c0322 100644 --- a/hathor/indexes/rocksdb_address_index.py +++ b/hathor/indexes/rocksdb_address_index.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import TYPE_CHECKING, Iterable, List, Optional +from typing import TYPE_CHECKING, Iterable, Optional from structlog import get_logger @@ -63,10 +63,10 @@ def add_tx(self, tx: BaseTransaction) -> None: super().add_tx(tx) self._publish_tx(tx) - def get_from_address(self, address: str) -> List[bytes]: + def get_from_address(self, address: str) -> list[bytes]: return list(self._get_from_key(address)) - def get_sorted_from_address(self, address: str) -> List[bytes]: + def get_sorted_from_address(self, address: str) -> list[bytes]: return list(self._get_sorted_from_key(address)) def is_address_empty(self, address: str) -> bool: diff --git a/hathor/indexes/rocksdb_deps_index.py b/hathor/indexes/rocksdb_deps_index.py index d5e40b788..fee70fb99 100644 --- a/hathor/indexes/rocksdb_deps_index.py +++ b/hathor/indexes/rocksdb_deps_index.py @@ -14,7 +14,7 @@ from dataclasses import dataclass from enum import Enum -from typing import TYPE_CHECKING, FrozenSet, Iterator, List, Optional, Tuple +from typing import TYPE_CHECKING, Iterator, Optional from structlog import get_logger @@ -154,7 +154,7 @@ def _to_value_needed(self, height: int, tx_hash: bytes) -> bytes: assert len(value) == 4 + 32 return bytes(value) - def _from_value_needed(self, value: bytes) -> Tuple[int, bytes]: + def _from_value_needed(self, value: bytes) -> tuple[int, bytes]: import struct assert len(value) == 4 + 32 height, = struct.unpack('!I', value[:4]) @@ -264,7 
+264,7 @@ def next_ready_for_validation(self, tx_storage: 'TransactionStorage', *, dry_run if not dry_run: self._db.write(batch) - def _drain_all_sorted_ready(self, tx_storage: 'TransactionStorage', batch: 'rocksdb.WriteBatch') -> List[bytes]: + def _drain_all_sorted_ready(self, tx_storage: 'TransactionStorage', batch: 'rocksdb.WriteBatch') -> list[bytes]: ready = list(self._drain_all_ready(tx_storage, batch)) ready.sort(key=lambda tx_hash: tx_storage.get_transaction(tx_hash).timestamp) return ready @@ -312,20 +312,20 @@ def _iter_has_rev_deps(self) -> Iterator[bytes]: it.seek(seek_key) self.log.debug('seek end') - def known_children(self, tx: BaseTransaction) -> List[bytes]: + def known_children(self, tx: BaseTransaction) -> list[bytes]: assert tx.hash is not None assert tx.storage is not None it_rev_deps = map(tx.storage.get_transaction, self._get_rev_deps(tx.hash)) return [not_none(rev.hash) for rev in it_rev_deps if tx.hash in rev.parents] - def _get_rev_deps(self, tx: bytes) -> FrozenSet[bytes]: + def _get_rev_deps(self, tx: bytes) -> frozenset[bytes]: """Get all txs that depend on the given tx (i.e. its reverse depdendencies).""" return frozenset(self._iter_rev_deps_of(tx)) def has_needed_tx(self) -> bool: return any(self._iter_needed()) - def _iter_needed(self) -> Iterator[Tuple[bytes, int, bytes]]: + def _iter_needed(self) -> Iterator[tuple[bytes, int, bytes]]: """Iterate over needed txs items, which is a tuple of (tx_dep_hash, height, tx_requested_hash)""" it = self._db.iteritems(self._cf) seek_key = self._to_key_needed() diff --git a/hathor/indexes/rocksdb_height_index.py b/hathor/indexes/rocksdb_height_index.py index c652ab0c9..022f60b0c 100644 --- a/hathor/indexes/rocksdb_height_index.py +++ b/hathor/indexes/rocksdb_height_index.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import TYPE_CHECKING, Optional, Tuple +from typing import TYPE_CHECKING, Optional from structlog import get_logger @@ -141,7 +141,7 @@ def get_tip(self) -> bytes: assert value is not None # must never be empty, at least genesis has been added return self._from_value(value).hash - def get_height_tip(self) -> Tuple[int, bytes]: + def get_height_tip(self) -> tuple[int, bytes]: it = self._db.iteritems(self._cf) it.seek_to_last() (_, key), value = it.get() diff --git a/hathor/indexes/rocksdb_timestamp_index.py b/hathor/indexes/rocksdb_timestamp_index.py index 01e4e609b..c505820a3 100644 --- a/hathor/indexes/rocksdb_timestamp_index.py +++ b/hathor/indexes/rocksdb_timestamp_index.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import TYPE_CHECKING, Iterator, List, Optional, Tuple +from typing import TYPE_CHECKING, Iterator, Optional from structlog import get_logger @@ -63,7 +63,7 @@ def _to_key(self, timestamp: int, tx_hash: Optional[bytes] = None) -> bytes: assert len(key) == 4 + 32 return bytes(key) - def _from_key(self, key: bytes) -> Tuple[int, bytes]: + def _from_key(self, key: bytes) -> tuple[int, bytes]: """Parse a key on the column-family.""" import struct assert len(key) == 4 + 32 @@ -85,7 +85,7 @@ def del_tx(self, tx: BaseTransaction) -> None: self._db.delete((self._cf, key)) def _iter(self, from_timestamp: Optional[int] = None, from_tx: Optional[bytes] = None, - *, reverse: bool = False) -> Iterator[Tuple[int, bytes]]: + *, reverse: bool = False) -> Iterator[tuple[int, bytes]]: """ Iterate over transactions optionally starting from a timestamp/hash, by default from oldest to newest. If we request with from_timestamp=ts1 and from_tx=tx1, (ts1,tx1) will not be returned by the iterator. 
@@ -123,24 +123,24 @@ def _iter(self, from_timestamp: Optional[int] = None, from_tx: Optional[bytes] = yield timestamp, tx_hash self.log.debug('seek end') - def get_newest(self, count: int) -> Tuple[List[bytes], bool]: + def get_newest(self, count: int) -> tuple[list[bytes], bool]: it = (x for _, x in self._iter(reverse=True)) return collect_n(it, count) - def get_older(self, timestamp: int, hash_bytes: bytes, count: int) -> Tuple[List[bytes], bool]: + def get_older(self, timestamp: int, hash_bytes: bytes, count: int) -> tuple[list[bytes], bool]: it = (x for _, x in self._iter(timestamp, hash_bytes, reverse=True)) return collect_n(it, count) - def get_newer(self, timestamp: int, hash_bytes: bytes, count: int) -> Tuple[List[bytes], bool]: + def get_newer(self, timestamp: int, hash_bytes: bytes, count: int) -> tuple[list[bytes], bool]: it = (x for _, x in self._iter(timestamp, hash_bytes)) return collect_n(it, count) - def get_hashes_and_next_idx(self, from_idx: RangeIdx, count: int) -> Tuple[List[bytes], Optional[RangeIdx]]: + def get_hashes_and_next_idx(self, from_idx: RangeIdx, count: int) -> tuple[list[bytes], Optional[RangeIdx]]: if count <= 0: raise ValueError(f'count must be positive, got {count}') timestamp, offset = from_idx it = skip_n(self._iter(timestamp), offset) - hashes: List[bytes] = [] + hashes: list[bytes] = [] n = count next_timestamp = timestamp next_offset = offset diff --git a/hathor/indexes/rocksdb_tokens_index.py b/hathor/indexes/rocksdb_tokens_index.py index 633beb348..2f001610d 100644 --- a/hathor/indexes/rocksdb_tokens_index.py +++ b/hathor/indexes/rocksdb_tokens_index.py @@ -14,7 +14,7 @@ from dataclasses import dataclass from enum import Enum -from typing import TYPE_CHECKING, Iterator, List, NamedTuple, Optional, Tuple, TypedDict, cast +from typing import TYPE_CHECKING, Iterator, NamedTuple, Optional, TypedDict, cast from structlog import get_logger @@ -330,7 +330,7 @@ def del_tx(self, tx: BaseTransaction) -> None: assert tx.hash is not 
None self._destroy_token(tx.hash) - def iter_all_tokens(self) -> Iterator[Tuple[bytes, TokenIndexInfo]]: + def iter_all_tokens(self) -> Iterator[tuple[bytes, TokenIndexInfo]]: self.log.debug('seek to start') it = self._db.iteritems(self._cf) it.seek(bytes([_Tag.INFO.value])) @@ -388,17 +388,17 @@ def get_transactions_count(self, token_uid: bytes) -> int: # TODO: maybe it's possible to optimize this with rocksdb prefix stuff return sum(1 for _ in self._iter_transactions(token_uid)) - def get_newest_transactions(self, token_uid: bytes, count: int) -> Tuple[List[bytes], bool]: + def get_newest_transactions(self, token_uid: bytes, count: int) -> tuple[list[bytes], bool]: it = self._iter_transactions(token_uid, reverse=True) return collect_n(it, count) def get_older_transactions(self, token_uid: bytes, timestamp: int, hash_bytes: bytes, count: int - ) -> Tuple[List[bytes], bool]: + ) -> tuple[list[bytes], bool]: it = self._iter_transactions(token_uid, _TxIndex(hash_bytes, timestamp), reverse=True) return collect_n(it, count) def get_newer_transactions(self, token_uid: bytes, timestamp: int, hash_bytes: bytes, count: int - ) -> Tuple[List[bytes], bool]: + ) -> tuple[list[bytes], bool]: it = self._iter_transactions(token_uid, _TxIndex(hash_bytes, timestamp)) return collect_n(it, count) diff --git a/hathor/indexes/rocksdb_tx_group_index.py b/hathor/indexes/rocksdb_tx_group_index.py index 1706eb3b1..bbbe19790 100644 --- a/hathor/indexes/rocksdb_tx_group_index.py +++ b/hathor/indexes/rocksdb_tx_group_index.py @@ -13,7 +13,7 @@ # limitations under the License. 
from abc import abstractmethod -from typing import TYPE_CHECKING, Iterable, Optional, Sized, Tuple, TypeVar +from typing import TYPE_CHECKING, Iterable, Optional, Sized, TypeVar from structlog import get_logger @@ -81,7 +81,7 @@ def _to_rocksdb_key(self, key: KT, tx: Optional[BaseTransaction] = None) -> byte assert len(rocksdb_key) == self._KEY_SIZE + 4 + 32 return rocksdb_key - def _from_rocksdb_key(self, rocksdb_key: bytes) -> Tuple[KT, int, bytes]: + def _from_rocksdb_key(self, rocksdb_key: bytes) -> tuple[KT, int, bytes]: import struct assert len(rocksdb_key) == self._KEY_SIZE + 4 + 32 key = self._deserialize_key(rocksdb_key[:self._KEY_SIZE]) diff --git a/hathor/indexes/rocksdb_utils.py b/hathor/indexes/rocksdb_utils.py index 3a98fed2d..87fddcb54 100644 --- a/hathor/indexes/rocksdb_utils.py +++ b/hathor/indexes/rocksdb_utils.py @@ -13,7 +13,7 @@ # limitations under the License. from collections.abc import Collection -from typing import TYPE_CHECKING, Dict, Iterable, Iterator, NewType +from typing import TYPE_CHECKING, Iterable, Iterator, NewType from hathor.conf import HathorSettings @@ -114,7 +114,7 @@ def clear(self) -> None: assert new_id != old_id self._log.debug('got new column family', id=new_id, old_id=old_id) - def _clone_into_dict(self) -> Dict[bytes, bytes]: + def _clone_into_dict(self) -> dict[bytes, bytes]: """This method will make a copy of the database into a plain dict, be careful when running on large dbs.""" it = self._db.iteritems(self._cf) it.seek_to_first() diff --git a/hathor/indexes/timestamp_index.py b/hathor/indexes/timestamp_index.py index a738dfc47..76d15a1d7 100644 --- a/hathor/indexes/timestamp_index.py +++ b/hathor/indexes/timestamp_index.py @@ -14,7 +14,7 @@ from abc import abstractmethod from enum import Enum -from typing import Iterator, List, NamedTuple, Optional, Tuple +from typing import Iterator, NamedTuple, Optional from structlog import get_logger @@ -82,7 +82,7 @@ def del_tx(self, tx: BaseTransaction) -> None: raise 
NotImplementedError @abstractmethod - def get_newest(self, count: int) -> Tuple[List[bytes], bool]: + def get_newest(self, count: int) -> tuple[list[bytes], bool]: """ Get transactions or blocks from the newest to the oldest :param count: Number of transactions or blocks to be returned @@ -91,7 +91,7 @@ def get_newest(self, count: int) -> Tuple[List[bytes], bool]: raise NotImplementedError @abstractmethod - def get_older(self, timestamp: int, hash_bytes: bytes, count: int) -> Tuple[List[bytes], bool]: + def get_older(self, timestamp: int, hash_bytes: bytes, count: int) -> tuple[list[bytes], bool]: """ Get transactions or blocks from the timestamp/hash_bytes reference to the oldest :param timestamp: Timestamp reference to start the search @@ -102,7 +102,7 @@ def get_older(self, timestamp: int, hash_bytes: bytes, count: int) -> Tuple[List raise NotImplementedError @abstractmethod - def get_newer(self, timestamp: int, hash_bytes: bytes, count: int) -> Tuple[List[bytes], bool]: + def get_newer(self, timestamp: int, hash_bytes: bytes, count: int) -> tuple[list[bytes], bool]: """ Get transactions or blocks from the timestamp/hash_bytes reference to the newest :param timestamp: Timestamp reference to start the search @@ -113,7 +113,7 @@ def get_newer(self, timestamp: int, hash_bytes: bytes, count: int) -> Tuple[List raise NotImplementedError @abstractmethod - def get_hashes_and_next_idx(self, from_idx: RangeIdx, count: int) -> Tuple[List[bytes], Optional[RangeIdx]]: + def get_hashes_and_next_idx(self, from_idx: RangeIdx, count: int) -> tuple[list[bytes], Optional[RangeIdx]]: """ Get up to count hashes if available and the next range-index, this is used by sync-v1. """ raise NotImplementedError diff --git a/hathor/indexes/tokens_index.py b/hathor/indexes/tokens_index.py index 9528ee32b..07b47655b 100644 --- a/hathor/indexes/tokens_index.py +++ b/hathor/indexes/tokens_index.py @@ -13,7 +13,7 @@ # limitations under the License. 
from abc import ABC, abstractmethod -from typing import Iterator, List, NamedTuple, Optional, Tuple +from typing import Iterator, NamedTuple, Optional from hathor.indexes.base_index import BaseIndex from hathor.indexes.scope import Scope @@ -91,7 +91,7 @@ def del_tx(self, tx: BaseTransaction) -> None: raise NotImplementedError @abstractmethod - def iter_all_tokens(self) -> Iterator[Tuple[bytes, TokenIndexInfo]]: + def iter_all_tokens(self) -> Iterator[tuple[bytes, TokenIndexInfo]]: """ Iterate over all tokens, yields tuples of (token_uid, token_index_info) """ raise NotImplementedError @@ -114,21 +114,21 @@ def get_transactions_count(self, token_uid: bytes) -> int: raise NotImplementedError @abstractmethod - def get_newest_transactions(self, token_uid: bytes, count: int) -> Tuple[List[bytes], bool]: + def get_newest_transactions(self, token_uid: bytes, count: int) -> tuple[list[bytes], bool]: """ Get transactions from the newest to the oldest """ raise NotImplementedError @abstractmethod def get_older_transactions(self, token_uid: bytes, timestamp: int, hash_bytes: bytes, count: int - ) -> Tuple[List[bytes], bool]: + ) -> tuple[list[bytes], bool]: """ Get transactions from the timestamp/hash_bytes reference to the oldest """ raise NotImplementedError @abstractmethod def get_newer_transactions(self, token_uid: bytes, timestamp: int, hash_bytes: bytes, count: int - ) -> Tuple[List[bytes], bool]: + ) -> tuple[list[bytes], bool]: """ Get transactions from the timestamp/hash_bytes reference to the newest """ raise NotImplementedError diff --git a/hathor/indexes/utils.py b/hathor/indexes/utils.py index 8d9afa4c5..949e59c0e 100644 --- a/hathor/indexes/utils.py +++ b/hathor/indexes/utils.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import List, NamedTuple, Tuple +from typing import NamedTuple from sortedcontainers import SortedKeyList from structlog import get_logger @@ -26,7 +26,7 @@ class TransactionIndexElement(NamedTuple): def get_newest_sorted_key_list(key_list: 'SortedKeyList[TransactionIndexElement]', count: int - ) -> Tuple[List[bytes], bool]: + ) -> tuple[list[bytes], bool]: """ Get newest data from a sorted key list Return the elements (quantity is the 'count' parameter) and a boolean indicating if has more """ @@ -45,7 +45,7 @@ def get_newest_sorted_key_list(key_list: 'SortedKeyList[TransactionIndexElement] def get_older_sorted_key_list(key_list: 'SortedKeyList[TransactionIndexElement]', timestamp: int, - hash_bytes: bytes, count: int) -> Tuple[List[bytes], bool]: + hash_bytes: bytes, count: int) -> tuple[list[bytes], bool]: """ Get sorted key list data from the timestamp/hash_bytes reference to the oldest Return the elements (quantity is the 'count' parameter) and a boolean indicating if has more """ @@ -61,7 +61,7 @@ def get_older_sorted_key_list(key_list: 'SortedKeyList[TransactionIndexElement]' def get_newer_sorted_key_list(key_list: 'SortedKeyList[TransactionIndexElement]', timestamp: int, - hash_bytes: bytes, count: int) -> Tuple[List[bytes], bool]: + hash_bytes: bytes, count: int) -> tuple[list[bytes], bool]: """ Get sorted key list data from the timestamp/hash_bytes reference to the newest Return the elements (quantity is the 'count' parameter) and a boolean indicating if has more """ diff --git a/hathor/manager.py b/hathor/manager.py index 52232a4c7..24d718688 100644 --- a/hathor/manager.py +++ b/hathor/manager.py @@ -16,7 +16,7 @@ import sys import time from enum import Enum -from typing import Any, Iterable, Iterator, List, NamedTuple, Optional, Tuple, Union +from typing import Any, Iterable, Iterator, NamedTuple, Optional, Union from hathorlib.base_transaction import tx_or_block_from_bytes as lib_tx_or_block_from_bytes from structlog import get_logger @@ 
-95,8 +95,8 @@ def __init__(self, network: str, hostname: Optional[str] = None, wallet: Optional[BaseWallet] = None, - capabilities: Optional[List[str]] = None, - checkpoints: Optional[List[Checkpoint]] = None, + capabilities: Optional[list[str]] = None, + checkpoints: Optional[list[Checkpoint]] = None, rng: Optional[Random] = None, environment_info: Optional[EnvironmentInfo] = None, full_verification: bool = False, @@ -148,8 +148,8 @@ def __init__(self, self.cpu = cpu # XXX: first checkpoint must be genesis (height=0) - self.checkpoints: List[Checkpoint] = checkpoints or [] - self.checkpoints_ready: List[bool] = [False] * len(self.checkpoints) + self.checkpoints: list[Checkpoint] = checkpoints or [] + self.checkpoints_ready: list[bool] = [False] * len(self.checkpoints) if not self.checkpoints or self.checkpoints[0].height > 0: self.checkpoints.insert(0, Checkpoint(0, settings.GENESIS_BLOCK_HASH)) self.checkpoints_ready.insert(0, True) @@ -167,7 +167,7 @@ def __init__(self, self.consensus_algorithm = consensus_algorithm - self.peer_discoveries: List[PeerDiscovery] = [] + self.peer_discoveries: list[PeerDiscovery] = [] self.connections = p2p_manager @@ -194,14 +194,14 @@ def __init__(self, self.pow_thread_pool = ThreadPool(minthreads=0, maxthreads=settings.MAX_POW_THREADS, name='Pow thread pool') # List of addresses to listen for new connections (eg: [tcp:8000]) - self.listen_addresses: List[str] = [] + self.listen_addresses: list[str] = [] # Full verification execute all validations for transactions and blocks when initializing the node # Can be activated on the command line with --full-verification self._full_verification = full_verification # List of whitelisted peers - self.peers_whitelist: List[str] = [] + self.peers_whitelist: list[str] = [] # List of capabilities of the peer if capabilities is not None: @@ -526,7 +526,7 @@ def _initialize_components(self) -> None: # restart all validations possible if self.tx_storage.indexes.deps and 
self.tx_storage.indexes.deps.has_needed_tx(): self.log.debug('run pending validations') - depended_final_txs: List[BaseTransaction] = [] + depended_final_txs: list[BaseTransaction] = [] for tx_hash in self.tx_storage.indexes.deps.iter(): if not self.tx_storage.transaction_exists(tx_hash): continue @@ -686,7 +686,7 @@ def _sync_v2_resume_validations(self) -> None: assert self.tx_storage.indexes.deps is not None if self.tx_storage.indexes.deps.has_needed_tx(): self.log.debug('run pending validations') - depended_final_txs: List[BaseTransaction] = [] + depended_final_txs: list[BaseTransaction] = [] for tx_hash in self.tx_storage.indexes.deps.iter(): if not self.tx_storage.transaction_exists(tx_hash): continue @@ -702,7 +702,7 @@ def add_listen_address(self, addr: str) -> None: def add_peer_discovery(self, peer_discovery: PeerDiscovery) -> None: self.peer_discoveries.append(peer_discovery) - def get_new_tx_parents(self, timestamp: Optional[float] = None) -> List[VertexId]: + def get_new_tx_parents(self, timestamp: Optional[float] = None) -> list[VertexId]: """Select which transactions will be confirmed by a new transaction. :return: The hashes of the parents for a new transaction. @@ -722,7 +722,7 @@ def generate_parent_txs(self, timestamp: Optional[float]) -> 'ParentTxs': can_include_intervals = sorted(self.tx_storage.get_tx_tips(timestamp - 1)) assert can_include_intervals, 'tips cannot be empty' max_timestamp = max(int(i.begin) for i in can_include_intervals) - must_include: List[VertexId] = [] + must_include: list[VertexId] = [] assert len(can_include_intervals) > 0, f'invalid timestamp "{timestamp}", no tips found"' if len(can_include_intervals) < 2: # If there is only one tip, let's randomly choose one of its parents. 
@@ -782,14 +782,14 @@ def make_block_template(self, parent_block_hash: VertexId, timestamp: Optional[i current_timestamp = timestamp return self._make_block_template(parent_block, parent_txs, current_timestamp) - def make_custom_block_template(self, parent_block_hash: VertexId, parent_tx_hashes: List[VertexId], + def make_custom_block_template(self, parent_block_hash: VertexId, parent_tx_hashes: list[VertexId], timestamp: Optional[int] = None) -> BlockTemplate: """ Makes a block template using the given parent block and txs. """ parent_block = self.tx_storage.get_transaction(parent_block_hash) assert isinstance(parent_block, Block) # gather the actual txs to query their timestamps - parent_tx_list: List[Transaction] = [] + parent_tx_list: list[Transaction] = [] for tx_hash in parent_tx_hashes: tx = self.tx_storage.get_transaction(tx_hash) assert isinstance(tx, Transaction) @@ -1178,7 +1178,7 @@ def has_recent_activity(self) -> bool: return True - def is_healthy(self) -> Tuple[bool, Optional[str]]: + def is_healthy(self) -> tuple[bool, Optional[str]]: if not self.has_recent_activity(): return False, HathorManager.UnhealthinessReason.NO_RECENT_ACTIVITY @@ -1217,10 +1217,10 @@ class ParentTxs(NamedTuple): included. """ max_timestamp: int - can_include: List[VertexId] - must_include: List[VertexId] + can_include: list[VertexId] + must_include: list[VertexId] - def get_random_parents(self, rng: Random) -> Tuple[VertexId, VertexId]: + def get_random_parents(self, rng: Random) -> tuple[VertexId, VertexId]: """ Get parents from self.parents plus a random choice from self.parents_any to make it 3 in total. Using tuple as return type to make it explicit that the length is always 2. 
@@ -1230,6 +1230,6 @@ def get_random_parents(self, rng: Random) -> Tuple[VertexId, VertexId]: p1, p2 = self.must_include[:] + fill return p1, p2 - def get_all_tips(self) -> List[VertexId]: + def get_all_tips(self) -> list[VertexId]: """All generated "tips", can_include + must_include.""" return self.must_include + self.can_include diff --git a/hathor/merged_mining/bitcoin.py b/hathor/merged_mining/bitcoin.py index 3eb2e578b..90765e6bc 100644 --- a/hathor/merged_mining/bitcoin.py +++ b/hathor/merged_mining/bitcoin.py @@ -13,7 +13,7 @@ # limitations under the License. import struct -from typing import Dict, List, NamedTuple, Sequence, Tuple, Union +from typing import NamedTuple, Sequence, Union class BitcoinRawTransaction(NamedTuple): @@ -22,7 +22,7 @@ class BitcoinRawTransaction(NamedTuple): data: bytes @classmethod - def from_dict(cls, data: Dict) -> 'BitcoinRawTransaction': + def from_dict(cls, data: dict) -> 'BitcoinRawTransaction': return cls(bytes.fromhex(data['hash']), bytes.fromhex(data['txid']), bytes.fromhex(data['data'])) def __bytes__(self) -> bytes: @@ -64,7 +64,7 @@ def __bytes__(self) -> bytes: ]) @classmethod - def from_dict(cls, params: Dict) -> 'BitcoinBlockHeader': + def from_dict(cls, params: dict) -> 'BitcoinBlockHeader': r""" Convert from dict of the properties returned from Bitcoin RPC. Examples: @@ -127,7 +127,7 @@ def _merkle_concat(left: bytes, right: bytes) -> bytes: return bytes(reversed(left)) + bytes(reversed(right)) -def build_merkle_path_for_coinbase(merkle_leaves: List[bytes]) -> List[bytes]: +def build_merkle_path_for_coinbase(merkle_leaves: list[bytes]) -> list[bytes]: """ Return the merkle path (unidirectional since it's a list) to the coinbase (not included) from hash leaves. 
>>> tx_list = [bytes.fromhex(tx) for tx in [ @@ -159,7 +159,7 @@ def build_merkle_path_for_coinbase(merkle_leaves: List[bytes]) -> List[bytes]: return _build_merkle_path_for_coinbase([b''] + merkle_leaves) -def _build_merkle_path_for_coinbase(merkle_leaves: List[bytes], _partial_path: List[bytes] = []) -> List[bytes]: +def _build_merkle_path_for_coinbase(merkle_leaves: list[bytes], _partial_path: list[bytes] = []) -> list[bytes]: """ Internal implementation of `build_merkle_path_for_coinbase`, assumes first `merkle_leave` is the coinbase. """ merkle_leaves = merkle_leaves[:] # copy to preserve original @@ -181,7 +181,7 @@ def _build_merkle_path_for_coinbase(merkle_leaves: List[bytes], _partial_path: L ) -def build_merkle_root(merkle_leaves: List[bytes]) -> bytes: +def build_merkle_root(merkle_leaves: list[bytes]) -> bytes: """ Return the merkle root hash from hash leaves. >>> build_merkle_root([bytes.fromhex(tx) for tx in [ @@ -216,7 +216,7 @@ def build_merkle_root(merkle_leaves: List[bytes]) -> bytes: return build_merkle_root([sha256d_hash(_merkle_concat(a, b)) for a, b in zip(iter_leaves, iter_leaves)]) -def build_merkle_root_from_path(merkle_path: List[bytes]) -> bytes: +def build_merkle_root_from_path(merkle_path: list[bytes]) -> bytes: """ Return the merkle root hash from a given unidirectional (all right) merkle path. Useful for computing merkle root given the merkle path to the coinbase (including the coinbase tx). @@ -292,7 +292,7 @@ class BitcoinTransactionInput(NamedTuple): # Transaction version as defined by the sender. Intended for "replacement" of transactions when information is # updated before inclusion into a block. sequence: int = SEQUENCE_FINAL # default value disables nLockTime - script_witness: List[bytes] = [] + script_witness: list[bytes] = [] def __bytes__(self) -> bytes: """ Convert to byte representation of the header. 
@@ -328,7 +328,7 @@ def has_witness(self) -> bool: return bool(self.script_witness) @classmethod - def from_dict(cls, params: Dict) -> 'BitcoinTransactionInput': + def from_dict(cls, params: dict) -> 'BitcoinTransactionInput': r""" Convert from dict of the properties returned from Bitcoin RPC. Examples: @@ -387,7 +387,7 @@ def __bytes__(self) -> bytes: return struct.pack(' 'BitcoinTransactionOutput': + def from_dict(cls, params: dict) -> 'BitcoinTransactionOutput': r""" Convert from dict of the properties returned from Bitcoin RPC. Examples: @@ -425,8 +425,8 @@ def from_dict(cls, params: Dict) -> 'BitcoinTransactionOutput': class BitcoinTransaction(NamedTuple): version: int = 1 # Transaction data format version (note, this is signed) include_witness: bool = True # Whether to include the witness flag (0001) - inputs: List[BitcoinTransactionInput] = [] # A list of 1 or more transaction inputs or sources for coins - outputs: List[BitcoinTransactionOutput] = [] # A list of 1 or more transaction outputs or destinations for coins + inputs: list[BitcoinTransactionInput] = [] # A list of 1 or more transaction inputs or sources for coins + outputs: list[BitcoinTransactionOutput] = [] # A list of 1 or more transaction outputs or destinations for coins lock_time: int = 0 # The block number or timestamp at which this transaction is unlocked def __bytes__(self) -> bytes: @@ -472,7 +472,7 @@ def to_raw(self) -> BitcoinRawTransaction: return BitcoinRawTransaction(self.hash, self.txid, bytes(self)) @property - def tx_witnesses(self) -> List[List[bytes]]: + def tx_witnesses(self) -> list[list[bytes]]: """ List of witnesses list: each input yields a list. 
""" return [i.script_witness or [b'\00' * 32] for i in self.inputs] @@ -490,7 +490,7 @@ def txid(self) -> bytes: return sha256d_hash(self._to_bytes(skip_segwit=True)) @classmethod - def from_dict(cls, params: Dict) -> 'BitcoinTransaction': + def from_dict(cls, params: dict) -> 'BitcoinTransaction': r""" Convert from dict of the properties returned from Bitcoin RPC. Examples: @@ -777,7 +777,7 @@ def encode_list(buffer: Sequence[bytes]) -> bytes: return encode_varint(len(buffer)) + b''.join(buffer) -def encode_bytearray_list(buffer: List[bytes]) -> bytes: +def encode_bytearray_list(buffer: list[bytes]) -> bytes: """ Variable length list encoding of bytes """ return encode_varint(len(buffer)) + b''.join(map(encode_bytearray, buffer)) @@ -863,7 +863,7 @@ def read_nrevbytes(buffer: bytearray, length: int) -> bytes: return array -def read_input(buffer: bytearray, witnesses: List[bytes] = []) -> BitcoinTransactionInput: +def read_input(buffer: bytearray, witnesses: list[bytes] = []) -> BitcoinTransactionInput: """ Parse a single input, read bytes are consumed. """ outpoint = read_outpoint(buffer) @@ -872,7 +872,7 @@ def read_input(buffer: bytearray, witnesses: List[bytes] = []) -> BitcoinTransac return BitcoinTransactionInput(outpoint, script_sig, sequence, witnesses) -def read_witnesses(buffer: bytearray, input_count: int, witnesses_offset: int) -> List[List[bytes]]: +def read_witnesses(buffer: bytearray, input_count: int, witnesses_offset: int) -> list[list[bytes]]: """ Parse the list of witnesses, a list for each input, read bytes are consumed. """ witnesses_buf = buffer[witnesses_offset:] @@ -887,7 +887,7 @@ def read_witnesses(buffer: bytearray, input_count: int, witnesses_offset: int) - return witnesses_per_input -def read_inputs(buffer: bytearray, with_witnesses: bool) -> List[BitcoinTransactionInput]: +def read_inputs(buffer: bytearray, with_witnesses: bool) -> list[BitcoinTransactionInput]: """ Parse a list of inputs, read bytes are consumed. 
Optionally include witnesses. """ if with_witnesses: @@ -915,7 +915,7 @@ def read_output(buffer: bytearray) -> BitcoinTransactionOutput: return BitcoinTransactionOutput(value, script) -def read_outputs(buffer: bytearray) -> List[BitcoinTransactionOutput]: +def read_outputs(buffer: bytearray) -> list[BitcoinTransactionOutput]: """ Parse a list of outputs, read bytes are consumed. """ count = read_varint(buffer) @@ -925,7 +925,7 @@ def read_outputs(buffer: bytearray) -> List[BitcoinTransactionOutput]: return outputs -def skip_inputs(buffer: bytearray) -> Tuple[int, int]: +def skip_inputs(buffer: bytearray) -> tuple[int, int]: """ Return the number of bytes read and count of inputs, but don't consume any byte. """ buffer2 = buffer.copy() @@ -933,7 +933,7 @@ def skip_inputs(buffer: bytearray) -> Tuple[int, int]: return len(buffer) - len(buffer2), len(inputs) -def skip_outputs(buffer: bytearray) -> Tuple[int, int]: +def skip_outputs(buffer: bytearray) -> tuple[int, int]: """ Return the number of bytes read and count of outputs, but don't consume any byte. 
""" buffer2 = buffer.copy() diff --git a/hathor/merged_mining/bitcoin_rpc.py b/hathor/merged_mining/bitcoin_rpc.py index 9b22f4b78..3953e69bd 100644 --- a/hathor/merged_mining/bitcoin_rpc.py +++ b/hathor/merged_mining/bitcoin_rpc.py @@ -14,7 +14,7 @@ from abc import ABC, abstractmethod from itertools import count -from typing import Any, Callable, Dict, Iterator, List, Optional, Union, cast +from typing import Any, Callable, Iterator, Optional, Union, cast from aiohttp import BasicAuth, ClientSession from structlog import get_logger @@ -30,9 +30,9 @@ def __init__(self, message: str, code: Optional[int] = None): class IBitcoinRPC(ABC): @abstractmethod - async def get_block_template(self, *, rules: List[str] = ['segwit'], longpoll_id: Optional[str], - capabilities: List[str] = ['coinbasetxn', 'workid', 'coinbase/append', 'longpoll'], - ) -> Dict: + async def get_block_template(self, *, rules: list[str] = ['segwit'], longpoll_id: Optional[str], + capabilities: list[str] = ['coinbasetxn', 'workid', 'coinbase/append', 'longpoll'], + ) -> dict: """ Method for the [GetBlockTemplate call](https://bitcoin.org/en/developer-reference#getblocktemplate). 
""" raise NotImplementedError @@ -114,7 +114,7 @@ async def _rpc_request(self, method: str, *args: Any, **kwargs: Any) -> Any: `{"id": 0, "method": "getblocktemplate", "params": {"template_request": {"capabilities": ["coinbasetxn"]}}}` """ assert bool(args) + bool(kwargs) < 2, 'Use at most one of: args or kwargs, but not both' - req_data: Dict = {'method': method} + req_data: dict = {'method': method} if self._iter_id: req_data['id'] = str(next(self._iter_id)) params = args or kwargs or None @@ -137,14 +137,14 @@ async def _rpc_request(self, method: str, *args: Any, **kwargs: Any) -> Any: raise RPCFailure(res_data['error']['message'], res_data['error']['code']) return res_data['result'] - async def get_block_template(self, *, rules: List[str] = ['segwit'], longpoll_id: Optional[str], - capabilities: List[str] = ['coinbasetxn', 'workid', 'coinbase/append', 'longpoll'], - ) -> Dict: - data: Dict[str, Any] = {'capabilities': capabilities, 'rules': rules} + async def get_block_template(self, *, rules: list[str] = ['segwit'], longpoll_id: Optional[str], + capabilities: list[str] = ['coinbasetxn', 'workid', 'coinbase/append', 'longpoll'], + ) -> dict: + data: dict[str, Any] = {'capabilities': capabilities, 'rules': rules} if longpoll_id is not None: data['longpollid'] = longpoll_id res = await self._rpc_request('getblocktemplate', data) - return cast(Dict[str, Any], res) + return cast(dict[str, Any], res) async def verify_block_proposal(self, *, block: bytes) -> Optional[str]: res = await self._rpc_request('getblocktemplate', {'mode': 'proposal', 'data': block.hex()}) diff --git a/hathor/merged_mining/coordinator.py b/hathor/merged_mining/coordinator.py index ec748857e..0c32cf985 100644 --- a/hathor/merged_mining/coordinator.py +++ b/hathor/merged_mining/coordinator.py @@ -21,7 +21,7 @@ import asyncio import time from itertools import count -from typing import Any, Callable, Dict, Iterator, List, NamedTuple, Optional, Set, Tuple, Union +from typing import Any, Callable, 
Iterator, NamedTuple, Optional, Union from uuid import uuid4 import aiohttp @@ -80,7 +80,7 @@ class HathorCoordJob(NamedTuple): block: HathorBlock height: Optional[int] - def to_dict(self) -> Dict[Any, Any]: + def to_dict(self) -> dict[Any, Any]: d = self.block.to_json() d['height'] = self.height return d @@ -104,7 +104,7 @@ def flip80(data: bytes) -> bytes: return b''.join(x[::-1] for x in ichunks(data, 4)) -def parse_login_with_addresses(login: str) -> Tuple[bytes, bytes, Optional[str]]: +def parse_login_with_addresses(login: str) -> tuple[bytes, bytes, Optional[str]]: """ Parses a login of the form HATHOR_ADDRESS.BITCOIN_ADDRESS[.WORKER_NAME] returns output scripts and worker name. Examples: @@ -141,7 +141,7 @@ class SingleMinerWork(NamedTuple): timestamp: Optional[int] = None @classmethod - def from_stratum_params(cls, xnonce1: bytes, params: List) -> 'SingleMinerWork': + def from_stratum_params(cls, xnonce1: bytes, params: list) -> 'SingleMinerWork': """ Parse params received from Stratum and instantiate work accordingly. """ from hathor.merged_mining.bitcoin import read_uint32 @@ -174,19 +174,19 @@ class SingleMinerJob(NamedTuple): prev_hash: bytes coinbase_head: bytes coinbase_tail: bytes - merkle_path: Tuple[bytes, ...] + merkle_path: tuple[bytes, ...] version: int bits: bytes # 4 bytes timestamp: int hathor_block: HathorBlock - transactions: List[BitcoinRawTransaction] + transactions: list[BitcoinRawTransaction] xnonce1: bytes xnonce2_size: int clean: bool = True bitcoin_height: int = 0 hathor_height: Optional[int] = None - def to_stratum_params(self) -> List: + def to_stratum_params(self) -> list: """ Assemble the parameters the way a Stratum client typically expects. 
""" return [ @@ -206,7 +206,7 @@ def _make_coinbase(self, work: SingleMinerWork) -> BitcoinTransaction: """ return BitcoinTransaction.decode(b''.join([self.coinbase_head, work.xnonce, self.coinbase_tail])) - def _make_bitcoin_block_and_coinbase(self, work: SingleMinerWork) -> Tuple[BitcoinBlockHeader, BitcoinTransaction]: + def _make_bitcoin_block_and_coinbase(self, work: SingleMinerWork) -> tuple[BitcoinBlockHeader, BitcoinTransaction]: """ Assemble the Bitcoin block header and coinbase transaction from this job and a given work. """ coinbase_tx = self._make_coinbase(work) @@ -290,8 +290,8 @@ def __init__(self, coordinator: 'MergedMiningCoordinator', xnonce1: bytes = b'', self.worker_name: Optional[str] = None self.login: Optional[str] = None # used to estimate the miner's hashrate, items are a tuple (timestamp, logwork) - self._submitted_work: List[Tuple[float, Weight, Hash]] = [] - self._new_submitted_work: List[Tuple[float, Weight, Hash]] = [] + self._submitted_work: list[tuple[float, Weight, Hash]] = [] + self._new_submitted_work: list[tuple[float, Weight, Hash]] = [] self.last_reduce = 0.0 self._estimator_last_len = 0 self.hashrate_ths: Optional[float] = None @@ -322,7 +322,7 @@ def uptime(self) -> float: return 0.0 return time.time() - self.subscribed_at - def status(self) -> Dict[Any, Any]: + def status(self) -> dict[Any, Any]: """ Build status dict with useful metrics for use in MM Status API. """ return { @@ -399,7 +399,7 @@ def line_received(self, message: bytes) -> None: assert isinstance(data, dict) self.json_received(data) - def json_received(self, data: Dict[Any, Any]) -> None: + def json_received(self, data: dict[Any, Any]) -> None: """ Process JSON and forward to the appropriate handle, usually `handle_request`. """ msgid = data.get('id') @@ -420,10 +420,10 @@ def json_received(self, data: Dict[Any, Any]) -> None: 'error': 'Could not identify message as request, result or error.' 
}) - def send_request(self, method: str, params: Union[None, List, Dict], msgid: Union[str, int, None] = None) -> None: + def send_request(self, method: str, params: Union[None, list, dict], msgid: Union[str, int, None] = None) -> None: """ Sends a JSON-RPC 2.0 request. """ - data: Dict[str, Any] = {'method': method, 'params': params} + data: dict[str, Any] = {'method': method, 'params': params} # XXX: keeping the same msgid type the client sent data['id'] = msgid self.log.debug('send request', data=data) @@ -438,7 +438,7 @@ def send_result(self, result: Any, msgid: Optional[str]) -> None: self.log.debug('send result', data=data) return self.send_json(data) - def send_error(self, error: Dict, msgid: Optional[str] = None, data: Any = None) -> None: + def send_error(self, error: dict, msgid: Optional[str] = None, data: Any = None) -> None: """ Sends a JSON-RPC 2.0 error. """ message = {'error': error, 'data': data} @@ -451,7 +451,7 @@ def send_error(self, error: Dict, msgid: Optional[str] = None, data: Any = None) if error['code'] <= UNRECOVERABLE_ERROR_CODE_MAX and self.transport is not None: self.transport.close() - def send_json(self, json: Dict) -> None: + def send_json(self, json: dict) -> None: """ Encodes a JSON and send it through the LineReceiver interface. """ from hathor.util import json_dumpb @@ -462,14 +462,14 @@ def send_json(self, json: Dict) -> None: except TypeError: self.log.error('failed to encode', json=json) - def handle_request(self, method: str, params: Optional[Union[List, Dict]], msgid: Optional[str]) -> None: + def handle_request(self, method: str, params: Optional[Union[list, dict]], msgid: Optional[str]) -> None: """ Handles subscribe and submit requests. 
:param method: JSON-RPC 2.0 request method :type method: str :param params: JSON-RPC 2.0 request params - :type params: Optional[Union[List, Dict]] + :type params: Optional[Union[list, dict]] :param msgid: JSON-RPC 2.0 message id :type msgid: Optional[str] @@ -477,19 +477,19 @@ def handle_request(self, method: str, params: Optional[Union[List, Dict]], msgid self.log.debug('handle request', method=method, params=params) if method in {'subscribe', 'mining.subscribe', 'login'}: - assert isinstance(params, List) + assert isinstance(params, list) return self.handle_subscribe(params, msgid) if method in {'authorize', 'mining.authorize'}: - assert isinstance(params, List) + assert isinstance(params, list) return self.handle_authorize(params, msgid) if method in {'submit', 'mining.submit'}: - assert isinstance(params, List) + assert isinstance(params, list) return self.handle_submit(params, msgid) if method in {'configure', 'mining.configure'}: - assert isinstance(params, List) + assert isinstance(params, list) return self.handle_configure(params, msgid) if method in {'multi_version', 'mining.multi_version'}: - assert isinstance(params, List) + assert isinstance(params, list) return self.handle_multi_version(params, msgid) if method == 'mining.extranonce.subscribe': return self.handle_extranonce_subscribe(msgid) @@ -501,12 +501,12 @@ def handle_result(self, result: Any, msgid: Optional[str]) -> None: """ self.log.debug('handle result', msgid=msgid, result=result) - def handle_error(self, error: Dict, data: Any, msgid: Optional[str]) -> None: + def handle_error(self, error: dict, data: Any, msgid: Optional[str]) -> None: """ Logs any errors since there are not supposed to be any. """ self.log.error('handle error', msgid=msgid, error=error) - def handle_authorize(self, params: List, msgid: Optional[str]) -> None: + def handle_authorize(self, params: list, msgid: Optional[str]) -> None: """ Handles authorize request by always authorizing even if the request is invalid. 
""" if self.coordinator.address_from_login: @@ -538,7 +538,7 @@ def handle_authorize(self, params: List, msgid: Optional[str]) -> None: self.job_request() self.start_estimator() - def handle_configure(self, params: List, msgid: Optional[str]) -> None: + def handle_configure(self, params: list, msgid: Optional[str]) -> None: """ Handles stratum-extensions configuration See: https://github.com/slushpool/stratumprotocol/blob/master/stratum-extensions.mediawiki @@ -553,7 +553,7 @@ def handle_configure(self, params: List, msgid: Optional[str]) -> None: self.send_result(res, msgid) - def handle_subscribe(self, params: List[str], msgid: Optional[str]) -> None: + def handle_subscribe(self, params: list[str], msgid: Optional[str]) -> None: """ Handles subscribe request by answering it and triggering a job request. :param msgid: JSON-RPC 2.0 message id @@ -585,7 +585,7 @@ def handle_subscribe(self, params: List[str], msgid: Optional[str]) -> None: if self._authorized: self.set_difficulty(self.initial_difficulty) - def handle_multi_version(self, params: List[Any], msgid: Optional[str]) -> None: + def handle_multi_version(self, params: list[Any], msgid: Optional[str]) -> None: """ Handles multi_version requests """ self.send_result(True, msgid) @@ -595,7 +595,7 @@ def handle_extranonce_subscribe(self, msgid: Optional[str]) -> None: """ self.send_result(True, msgid) - def handle_submit(self, params: List[Any], msgid: Optional[str]) -> None: + def handle_submit(self, params: list[Any], msgid: Optional[str]) -> None: """ Handles submit request by validating and propagating the result - params: rpc_user, job_id, xnonce2, time, nonce @@ -796,8 +796,8 @@ class BitcoinCoordJob(NamedTuple): size_limit: int bits: bytes height: int - transactions: List[BitcoinRawTransaction] - merkle_path: Tuple[bytes, ...] + transactions: list[BitcoinRawTransaction] + merkle_path: tuple[bytes, ...] 
witness_commitment: Optional[bytes] = None append_to_input: bool = True @@ -922,7 +922,7 @@ def from_dict(cls, params: dict) -> 'BitcoinCoordJob': bytes.fromhex(segwit_commitment) if segwit_commitment is not None else None, ) - def to_dict(self) -> Dict[Any, Any]: + def to_dict(self) -> dict[Any, Any]: """ Convert back to a simplified dict format similar to Bitcoin's, used by MM Status API. """ return { @@ -951,7 +951,7 @@ def make_coinbase_transaction(self, hathor_block_hash: bytes, payback_script_bit import struct inputs = [] - outputs: List[BitcoinTransactionOutput] = [] + outputs: list[BitcoinTransactionOutput] = [] # coinbase input coinbase_script = encode_bytearray(struct.pack(' None: +def strip_transactions(data: dict, rm_cond: Callable[[dict], bool]) -> None: """ Remove all transactions from gbt data for which rm_cond returns True. """ selected_txs = [] excluded_txs = [] @@ -1126,8 +1126,8 @@ def __init__(self, bitcoin_rpc: IBitcoinRPC, hathor_client: IHathorClient, self.hathor_client = hathor_client self.hathor_mining: Optional[IMiningChannel] = None self.address_from_login = address_from_login - self.jobs: Set[SingleMinerJob] = set() - self.miner_protocols: Dict[str, MergedMiningStratumProtocol] = {} + self.jobs: set[SingleMinerJob] = set() + self.miner_protocols: dict[str, MergedMiningStratumProtocol] = {} self.payback_address_bitcoin: Optional[str] = payback_address_bitcoin self.payback_address_hathor: Optional[str] = payback_address_hathor self.bitcoin_coord_job: Optional[BitcoinCoordJob] = None @@ -1360,7 +1360,7 @@ async def update_merged_block(self) -> None: self.update_jobs() self.log.debug('merged job updated') - def status(self) -> Dict[Any, Any]: + def status(self) -> dict[Any, Any]: """ Build status dict with useful metrics for use in MM Status API. 
""" miners = [p.status() for p in self.miner_protocols.values()] diff --git a/hathor/merged_mining/util.py b/hathor/merged_mining/util.py index 9cedf8ef6..786c4f351 100644 --- a/hathor/merged_mining/util.py +++ b/hathor/merged_mining/util.py @@ -15,7 +15,7 @@ import asyncio from contextlib import suppress from functools import wraps -from typing import Any, Awaitable, Callable, Dict, Optional, Tuple, Union +from typing import Any, Awaitable, Callable, Optional, Union from structlog import get_logger from twisted.internet.defer import Deferred, ensureDeferred @@ -34,8 +34,8 @@ class Periodic: def __init__(self, afunc: Callable[..., Awaitable[None]], interval: Union[int, float], - args: Tuple = (), - kwargs: Dict = {}): + args: tuple = (), + kwargs: dict = {}): """ Create Periodic instance from async function, `interval` is in seconds. """ self.afunc = afunc diff --git a/hathor/metrics.py b/hathor/metrics.py index cf56836ed..c8a780000 100644 --- a/hathor/metrics.py +++ b/hathor/metrics.py @@ -14,7 +14,7 @@ from collections import deque from dataclasses import dataclass, field -from typing import TYPE_CHECKING, Deque, Dict, List, NamedTuple, Optional +from typing import TYPE_CHECKING, NamedTuple, Optional from structlog import get_logger from twisted.internet.task import LoopingCall @@ -97,11 +97,11 @@ class Metrics: estimated_hash_rate: float = 0 # log(H/s) stratum_factory: Optional['StratumFactory'] = None # Peer Connection data - peer_connection_metrics: List[PeerConnectionMetrics] = field(default_factory=list) + peer_connection_metrics: list[PeerConnectionMetrics] = field(default_factory=list) # Send-token timeouts counter send_token_timeouts: int = 0 # Dict that stores the sizes of each column-family in RocksDB, in bytes - rocksdb_cfs_sizes: Dict[bytes, float] = field(default_factory=dict) + rocksdb_cfs_sizes: dict[bytes, float] = field(default_factory=dict) # TxCache Data transaction_cache_hits: int = 0 transaction_cache_misses: int = 0 @@ -123,10 +123,10 @@ def 
__post_init__(self) -> None: self.log = logger.new() # Stores caculated tx weights saved in tx storage - self.weight_tx_deque: Deque[WeightValue] = deque(maxlen=self.weight_tx_deque_len) + self.weight_tx_deque: deque[WeightValue] = deque(maxlen=self.weight_tx_deque_len) # Stores caculated block weights saved in tx storage - self.weight_block_deque: Deque[WeightValue] = deque(maxlen=self.weight_block_deque_len) + self.weight_block_deque: deque[WeightValue] = deque(maxlen=self.weight_block_deque_len) if self.reactor is None: from hathor.util import reactor as twisted_reactor diff --git a/hathor/mining/block_template.py b/hathor/mining/block_template.py index 384823b96..292a29b22 100644 --- a/hathor/mining/block_template.py +++ b/hathor/mining/block_template.py @@ -16,7 +16,7 @@ Module for abstractions around generating mining templates. """ -from typing import Dict, Iterable, List, NamedTuple, Optional, Set, Tuple, Type, Union +from typing import Iterable, NamedTuple, Optional, Union from hathor.transaction import BaseTransaction, Block, MergeMinedBlock from hathor.transaction.storage import TransactionStorage @@ -24,14 +24,14 @@ class BlockTemplate(NamedTuple): - versions: Set[int] + versions: set[int] reward: int # reward unit value, 64.00 HTR is 6400 weight: float # calculated from the DAA timestamp_now: int # the reference timestamp the template was generated for timestamp_min: int # min valid timestamp timestamp_max: int # max valid timestamp - parents: List[bytes] # required parents, will always have a block and at most 2 txs - parents_any: List[bytes] # list of extra parents to choose from when there are more options + parents: list[bytes] # required parents, will always have a block and at most 2 txs + parents_any: list[bytes] # list of extra parents to choose from when there are more options height: int # metadata score: float # metadata @@ -62,7 +62,7 @@ def generate_mining_block(self, rng: Random, merge_mined: bool = False, address: base_timestamp = 
timestamp if timestamp is not None else self.timestamp_now block_timestamp = min(max(base_timestamp, self.timestamp_min), self.timestamp_max) tx_outputs = [TxOutput(self.reward, output_script)] - cls: Union[Type['Block'], Type['MergeMinedBlock']] = MergeMinedBlock if merge_mined else Block + cls: Union[type['Block'], type['MergeMinedBlock']] = MergeMinedBlock if merge_mined else Block block = cls(outputs=tx_outputs, parents=parents, timestamp=block_timestamp, data=data or b'', storage=storage, weight=self.weight) if include_metadata: @@ -70,7 +70,7 @@ def generate_mining_block(self, rng: Random, merge_mined: bool = False, address: block.get_metadata(use_storage=False) return block - def get_random_parents(self, rng: Random) -> Tuple[bytes, bytes, bytes]: + def get_random_parents(self, rng: Random) -> tuple[bytes, bytes, bytes]: """ Get parents from self.parents plus a random choice from self.parents_any to make it 3 in total. Return type is tuple just to make it clear that the length is always 3. 
@@ -80,7 +80,7 @@ def get_random_parents(self, rng: Random) -> Tuple[bytes, bytes, bytes]: p1, p2, p3 = self.parents[:] + more_parents return p1, p2, p3 - def to_dict(self) -> Dict: + def to_dict(self) -> dict: return { 'data': self.generate_minimaly_valid_block().get_struct_without_nonce().hex(), 'versions': sorted(self.versions), @@ -96,7 +96,7 @@ def to_dict(self) -> Dict: } @classmethod - def from_dict(cls, data: Dict) -> 'BlockTemplate': + def from_dict(cls, data: dict) -> 'BlockTemplate': return cls( versions=set(data['versions']), reward=int(data['reward']), @@ -111,7 +111,7 @@ def from_dict(cls, data: Dict) -> 'BlockTemplate': ) -class BlockTemplates(List[BlockTemplate]): +class BlockTemplates(list[BlockTemplate]): def __init__(self, templates: Iterable[BlockTemplate], storage: Optional[TransactionStorage] = None): super().__init__(templates) self.storage = storage diff --git a/hathor/mining/ws.py b/hathor/mining/ws.py index 1668c1e91..acd040ace 100644 --- a/hathor/mining/ws.py +++ b/hathor/mining/ws.py @@ -17,7 +17,7 @@ """ from json import JSONDecodeError -from typing import Any, Dict, List, NamedTuple, Optional, Set, Union +from typing import Any, NamedTuple, Optional, Union from autobahn.twisted.websocket import WebSocketServerFactory, WebSocketServerProtocol from structlog import get_logger @@ -32,7 +32,7 @@ settings = HathorSettings() JsonRpcId = Union[str, int, float] -JsonValue = Optional[Union[Dict[str, Any], List[Any], str, int, float]] +JsonValue = Optional[Union[dict[str, Any], list[Any], str, int, float]] class JsonRpcError(NamedTuple): @@ -64,7 +64,7 @@ class JsonRpcWebsocketServerProtocol(WebSocketServerProtocol): def onMessage(self, payload: bytes, isBinary: bool) -> None: self.log.info('message', payload=payload) try: - data: Union[List[Dict], Dict] = json_loadb(payload) + data: Union[list[dict], dict] = json_loadb(payload) except JSONDecodeError: return self.send_response(error=JSON_RPC_PARSE_ERROR) try: @@ -77,11 +77,11 @@ def 
onMessage(self, payload: bytes, isBinary: bool) -> None: self.log.warn('internal error', exc_info=True) return self.send_response(error=JSON_RPC_INTERNAL_ERROR) - def _handle_request(self, data: Dict) -> None: + def _handle_request(self, data: dict) -> None: try: id = data.get('id') method_name = data['method'].replace('.', '_') - params: Union[Dict[str, Any], List[Any]] = data.get('params', []) + params: Union[dict[str, Any], list[Any]] = data.get('params', []) except (KeyError, ValueError): return self.send_response(error=JSON_RPC_INVALID_REQUEST) try: @@ -107,7 +107,7 @@ def send_response(self, *, id: Optional[JsonRpcId] = None, result: Optional[JsonValue] = None, error: Optional[JsonRpcError] = None) -> None: - response: Dict[str, JsonValue] = { + response: dict[str, JsonValue] = { 'id': id, 'error': None, 'result': None, @@ -122,8 +122,8 @@ def send_response(self, *, if error is not None and error.fatal: self.sendClose() - def send_notification(self, *, method: str, params: Union[List, Dict]) -> None: - request: Dict[str, JsonValue] = { + def send_notification(self, *, method: str, params: Union[list, dict]) -> None: + request: dict[str, JsonValue] = { 'id': None, 'method': method, } @@ -155,13 +155,13 @@ def onClose(self, wasClean, code, reason): self.factory.connections.remove(self) self._open = False - def do_mining_refresh(self) -> List[Dict]: + def do_mining_refresh(self) -> list[dict]: if not self.factory.manager.can_start_mining(): self.log.warn('node syncing') return [] return self.factory.get_block_templates() - def do_mining_submit(self, hexdata: str, optimistic: bool = False) -> Union[bool, Dict]: + def do_mining_submit(self, hexdata: str, optimistic: bool = False) -> Union[bool, dict]: if not self.factory.manager.can_start_mining(): self.log.warn('node syncing') return False @@ -181,13 +181,13 @@ class MiningWebsocketFactory(WebSocketServerFactory): """ protocol = MiningWebsocketProtocol - connections: Set[MiningWebsocketProtocol] + connections: 
set[MiningWebsocketProtocol] def __init__(self, manager: HathorManager): super().__init__() self.connections = set() self.manager = manager - self._last_broadcast: List[Dict] = [] + self._last_broadcast: list[dict] = [] manager.pubsub.subscribe(HathorEvents.NETWORK_NEW_TX_ACCEPTED, self._on_new_tx) def buildProtocol(self, addr): @@ -202,12 +202,12 @@ def _on_new_tx(self, key: HathorEvents, args: EventArguments) -> None: self.broadcast_notification(method='mining.notify', params=block_templates) self._last_broadcast = block_templates - def get_block_templates(self) -> List[Dict]: + def get_block_templates(self) -> list[dict]: """Serialized manager.get_block_templates()""" block_templates = self.manager.get_block_templates() return [t.to_dict() for t in block_templates] - def broadcast_notification(self, *, method: str, params: Union[List, Dict]) -> None: + def broadcast_notification(self, *, method: str, params: Union[list, dict]) -> None: """ Broadcast notification to all connections """ for conn in self.connections: diff --git a/hathor/p2p/downloader.py b/hathor/p2p/downloader.py index 1dfac1d2e..cbff9a3ff 100644 --- a/hathor/p2p/downloader.py +++ b/hathor/p2p/downloader.py @@ -14,7 +14,7 @@ from collections import deque from functools import partial -from typing import TYPE_CHECKING, Any, Deque, Dict, List, Optional +from typing import TYPE_CHECKING, Any, Optional from structlog import get_logger from twisted.internet import defer @@ -42,7 +42,7 @@ class TxDetails: deferred: Deferred # List of connections that requested this transaction. - connections: List['NodeSyncTimestamp'] + connections: list['NodeSyncTimestamp'] # This will be resolved after the transaction has been downloaded, # but not necessarily added to the DAG. 
@@ -52,7 +52,7 @@ class TxDetails: # Useful when we need to retry the request and want to select a new connection requested_index: int - def __init__(self, tx_id: bytes, deferred: Deferred, connections: List['NodeSyncTimestamp']): + def __init__(self, tx_id: bytes, deferred: Deferred, connections: list['NodeSyncTimestamp']): self.log = logger.new() self.tx_id = tx_id self.deferred = deferred @@ -130,17 +130,17 @@ class Downloader: """ # All transactions that must be downloaded. - pending_transactions: Dict[bytes, TxDetails] + pending_transactions: dict[bytes, TxDetails] # Transactions waiting to be downloaded. - waiting_deque: Deque[bytes] + waiting_deque: deque[bytes] # Transactions that are being downloaded. - downloading_deque: Deque[bytes] + downloading_deque: deque[bytes] # Transactions that have been downloaded but are not ready to be # added to the DAG. - downloading_buffer: Dict[bytes, 'BaseTransaction'] + downloading_buffer: dict[bytes, 'BaseTransaction'] # Size of the sliding window used to download transactions. window_size: int @@ -157,7 +157,7 @@ def __init__(self, manager: 'HathorManager', window_size: int = 100): def drop_connection(self, connection: 'NodeSyncTimestamp') -> None: """ Called when a peer is disconnected to remove that connection from all the affected pending transactions.""" - to_remove: List[bytes] = [] + to_remove: list[bytes] = [] for tx_details in self.pending_transactions.values(): if tx_details.drop_connection(connection): to_remove.append(tx_details.tx_id) diff --git a/hathor/p2p/factory.py b/hathor/p2p/factory.py index aa55b0b2d..da02328d3 100644 --- a/hathor/p2p/factory.py +++ b/hathor/p2p/factory.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import TYPE_CHECKING, Optional, Type +from typing import TYPE_CHECKING, Optional from twisted.internet import protocol from twisted.internet.interfaces import IAddress @@ -33,7 +33,7 @@ class HathorServerFactory(protocol.ServerFactory): """ manager: Optional[ConnectionsManager] - protocol: Type[MyServerProtocol] = MyServerProtocol + protocol: type[MyServerProtocol] = MyServerProtocol def __init__( self, @@ -66,7 +66,7 @@ class HathorClientFactory(protocol.ClientFactory): """ HathorClientFactory is used to generate HathorProtocol objects when we connected to another peer. """ - protocol: Type[MyClientProtocol] = MyClientProtocol + protocol: type[MyClientProtocol] = MyClientProtocol def __init__( self, diff --git a/hathor/p2p/manager.py b/hathor/p2p/manager.py index f09a83c90..5a60d63d1 100644 --- a/hathor/p2p/manager.py +++ b/hathor/p2p/manager.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import TYPE_CHECKING, Any, Dict, Iterable, List, NamedTuple, Optional, Set, Union +from typing import TYPE_CHECKING, Any, Iterable, NamedTuple, Optional, Union from structlog import get_logger from twisted.internet import endpoints @@ -49,11 +49,11 @@ class _SyncRotateInfo(NamedTuple): - candidates: List[str] - old: Set[str] - new: Set[str] - to_disable: Set[str] - to_enable: Set[str] + candidates: list[str] + old: set[str] + new: set[str] + to_disable: set[str] + to_enable: set[str] class _ConnectingPeer(NamedTuple): @@ -78,12 +78,12 @@ class GlobalRateLimiter: SEND_TIPS = 'NodeSyncTimestamp.send_tips' manager: Optional['HathorManager'] - connections: Set[HathorProtocol] - connected_peers: Dict[str, HathorProtocol] - connecting_peers: Dict[IStreamClientEndpoint, _ConnectingPeer] - handshaking_peers: Set[HathorProtocol] + connections: set[HathorProtocol] + connected_peers: dict[str, HathorProtocol] + connecting_peers: dict[IStreamClientEndpoint, _ConnectingPeer] + 
handshaking_peers: set[HathorProtocol] whitelist_only: bool - _sync_factories: Dict[SyncVersion, SyncManagerFactory] + _sync_factories: dict[SyncVersion, SyncManagerFactory] rate_limiter: RateLimiter @@ -146,7 +146,7 @@ def __init__(self, self.received_peer_storage = PeerStorage() # List of known peers. - self.peer_storage = PeerStorage() # Dict[string (peer.id), PeerId] + self.peer_storage = PeerStorage() # dict[string (peer.id), PeerId] # A timer to try to reconnect to the disconnect known peers. self.lc_reconnect = LoopingCall(self.reconnect_to_all) @@ -158,7 +158,7 @@ def __init__(self, self.lc_sync_update_interval: float = 5 # seconds # Peers that always have sync enabled. - self.always_enable_sync: Set[str] = set() + self.always_enable_sync: set[str] = set() # Timestamp of the last time sync was updated. self._last_sync_rotate: float = 0. @@ -247,7 +247,7 @@ def _get_peers_count(self) -> PeerConnectionsMetrics: len(self.peer_storage) ) - def get_sync_versions(self) -> Set[SyncVersion]: + def get_sync_versions(self) -> set[SyncVersion]: """Set of versions that were enabled and are supported.""" assert self.manager is not None if self.manager.has_sync_version_capability(): @@ -649,9 +649,9 @@ def sync_update(self) -> None: except Exception: self.log.error('_sync_rotate_if_needed failed', exc_info=True) - def set_always_enable_sync(self, values: List[str]) -> None: + def set_always_enable_sync(self, values: list[str]) -> None: """Set a new list of peers to always enable sync. 
This operation completely replaces the previous list.""" - new: Set[str] = set(values) + new: set[str] = set(values) old = self.always_enable_sync if new == old: @@ -676,14 +676,14 @@ def set_always_enable_sync(self, values: List[str]) -> None: def _calculate_sync_rotate(self) -> _SyncRotateInfo: """Calculate new sync rotation.""" - current_enabled: Set[str] = set() + current_enabled: set[str] = set() for peer_id, conn in self.connected_peers.items(): if conn.is_sync_enabled(): current_enabled.add(peer_id) candidates = list(self.connected_peers.keys()) self.rng.shuffle(candidates) - selected_peers: Set[str] = set(candidates[:self.MAX_ENABLED_SYNC]) + selected_peers: set[str] = set(candidates[:self.MAX_ENABLED_SYNC]) to_disable = current_enabled - selected_peers to_enable = selected_peers - current_enabled diff --git a/hathor/p2p/messages.py b/hathor/p2p/messages.py index 99102d586..507acab23 100644 --- a/hathor/p2p/messages.py +++ b/hathor/p2p/messages.py @@ -13,7 +13,7 @@ # limitations under the License. from enum import Enum -from typing import List, NamedTuple +from typing import NamedTuple class GetNextPayload(NamedTuple): @@ -25,7 +25,7 @@ class NextPayload(NamedTuple): timestamp: int next_timestamp: int next_offset: int - hashes: List[bytes] + hashes: list[bytes] class GetTipsPayload(NamedTuple): @@ -38,7 +38,7 @@ class TipsPayload(NamedTuple): length: int timestamp: int merkle_tree: bytes - hashes: List[str] + hashes: list[str] has_more: bool diff --git a/hathor/p2p/netfilter/chain.py b/hathor/p2p/netfilter/chain.py index 2126b3348..6463bc22e 100644 --- a/hathor/p2p/netfilter/chain.py +++ b/hathor/p2p/netfilter/chain.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import TYPE_CHECKING, Any, Dict, List, Optional +from typing import TYPE_CHECKING, Any, Optional if TYPE_CHECKING: from hathor.p2p.netfilter.context import NetfilterContext @@ -27,10 +27,10 @@ def __init__(self, name: str, policy: 'NetfilterTarget'): """Initialize the chain.""" self.name = name self.table: Optional['NetfilterTable'] = None - self.rules: List['NetfilterRule'] = [] + self.rules: list['NetfilterRule'] = [] self.policy = policy - def to_json(self) -> Dict[str, Any]: + def to_json(self) -> dict[str, Any]: return { 'name': self.name, 'table': self.table.to_json() if self.table else None, diff --git a/hathor/p2p/netfilter/matches.py b/hathor/p2p/netfilter/matches.py index de6e475e8..d686ac7aa 100644 --- a/hathor/p2p/netfilter/matches.py +++ b/hathor/p2p/netfilter/matches.py @@ -14,7 +14,7 @@ from abc import ABC, abstractmethod from ipaddress import ip_address, ip_network -from typing import TYPE_CHECKING, Any, Dict +from typing import TYPE_CHECKING, Any if TYPE_CHECKING: from hathor.p2p.netfilter.context import NetfilterContext @@ -23,7 +23,7 @@ class NetfilterMatch(ABC): """Abstract match class.""" - def to_json(self) -> Dict[str, Any]: + def to_json(self) -> dict[str, Any]: return { 'type': type(self).__name__, 'match_params': {} @@ -45,7 +45,7 @@ def __init__(self, a: NetfilterMatch, b: NetfilterMatch): self.a = a self.b = b - def to_json(self) -> Dict[str, Any]: + def to_json(self) -> dict[str, Any]: data = super().to_json() data['match_params']['a'] = self.a.to_json() data['match_params']['b'] = self.b.to_json() @@ -87,7 +87,7 @@ def __init__(self, host: str): """ self.network = ip_network(host) - def to_json(self) -> Dict[str, Any]: + def to_json(self) -> dict[str, Any]: data = super().to_json() data['match_params']['host'] = str(self.network) return data @@ -118,7 +118,7 @@ class NetfilterMatchPeerId(NetfilterMatch): def __init__(self, peer_id: str): self.peer_id = peer_id - def to_json(self) -> Dict[str, Any]: + def to_json(self) -> 
dict[str, Any]: data = super().to_json() data['match_params']['peer_id'] = self.peer_id return data diff --git a/hathor/p2p/netfilter/matches_remote.py b/hathor/p2p/netfilter/matches_remote.py index f85a8910b..79b011e20 100644 --- a/hathor/p2p/netfilter/matches_remote.py +++ b/hathor/p2p/netfilter/matches_remote.py @@ -13,7 +13,7 @@ # limitations under the License. from abc import abstractmethod -from typing import TYPE_CHECKING, Any, Dict, List +from typing import TYPE_CHECKING, Any from structlog import get_logger from twisted.internet.defer import Deferred @@ -39,14 +39,14 @@ def __init__(self, name: str, reactor: Reactor, url: str, update_interval: int = self.url = url self.update_interval = update_interval self.lc_update = LoopingCall(self.update) - self.items: List[str] = [] - self.matches: List[NetfilterMatch] = [] + self.items: list[str] = [] + self.matches: list[NetfilterMatch] = [] self.method = 'GET' self.headers = { 'User-Agent': ['hathor-core'], } - def to_json(self) -> Dict[str, Any]: + def to_json(self) -> dict[str, Any]: data = super().to_json() data['match_params']['name'] = self.name data['match_params']['url'] = self.url diff --git a/hathor/p2p/netfilter/rule.py b/hathor/p2p/netfilter/rule.py index e7cdaecf4..0122499c9 100644 --- a/hathor/p2p/netfilter/rule.py +++ b/hathor/p2p/netfilter/rule.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import TYPE_CHECKING, Any, Dict, Optional +from typing import TYPE_CHECKING, Any, Optional from uuid import uuid4 if TYPE_CHECKING: @@ -32,7 +32,7 @@ def __init__(self, match: 'NetfilterMatch', target: 'NetfilterTarget'): # UUID used to find the rule, in order to delete it self.uuid = str(uuid4()) - def to_json(self) -> Dict[str, Any]: + def to_json(self) -> dict[str, Any]: return { 'uuid': self.uuid, 'chain': self.chain.to_json() if self.chain else None, diff --git a/hathor/p2p/netfilter/table.py b/hathor/p2p/netfilter/table.py index d18423d13..7ba69f141 100644 --- a/hathor/p2p/netfilter/table.py +++ b/hathor/p2p/netfilter/table.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import TYPE_CHECKING, Any, Dict +from typing import TYPE_CHECKING, Any if TYPE_CHECKING: from hathor.p2p.netfilter.chain import NetfilterChain @@ -22,9 +22,9 @@ class NetfilterTable: """Table that contains one or more chains.""" def __init__(self, name: str): self.name = name - self.chains: Dict[str, 'NetfilterChain'] = {} + self.chains: dict[str, 'NetfilterChain'] = {} - def to_json(self) -> Dict[str, Any]: + def to_json(self) -> dict[str, Any]: return {'name': self.name} def add_chain(self, chain: 'NetfilterChain') -> 'NetfilterChain': diff --git a/hathor/p2p/netfilter/targets.py b/hathor/p2p/netfilter/targets.py index e6ec0cf7f..3ee9d2dba 100644 --- a/hathor/p2p/netfilter/targets.py +++ b/hathor/p2p/netfilter/targets.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import TYPE_CHECKING, Any, Dict +from typing import TYPE_CHECKING, Any if TYPE_CHECKING: from hathor.p2p.netfilter.context import NetfilterContext @@ -22,7 +22,7 @@ class NetfilterTarget: terminate: bool - def to_json(self) -> Dict[str, Any]: + def to_json(self) -> dict[str, Any]: return { 'type': type(self).__name__, 'target_params': {} @@ -56,7 +56,7 @@ class NetfilterLog(NetfilterTarget): def __init__(self, msg: str) -> None: self.msg = msg - def to_json(self) -> Dict[str, Any]: + def to_json(self) -> dict[str, Any]: data = super().to_json() data['target_params']['msg'] = self.msg return data diff --git a/hathor/p2p/netfilter/utils.py b/hathor/p2p/netfilter/utils.py index 4fbc245ca..5b8c6f4f1 100644 --- a/hathor/p2p/netfilter/utils.py +++ b/hathor/p2p/netfilter/utils.py @@ -12,15 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import List - from hathor.p2p.netfilter import get_table from hathor.p2p.netfilter.matches import NetfilterMatchPeerId from hathor.p2p.netfilter.rule import NetfilterRule from hathor.p2p.netfilter.targets import NetfilterReject -def add_peer_id_blacklist(peer_id_blacklist: List[str]) -> None: +def add_peer_id_blacklist(peer_id_blacklist: list[str]) -> None: """ Add a list of peer ids to a blacklist using netfilter reject """ post_peerid = get_table('filter').get_chain('post_peerid') diff --git a/hathor/p2p/node_sync.py b/hathor/p2p/node_sync.py index da2726c3f..9de01413c 100644 --- a/hathor/p2p/node_sync.py +++ b/hathor/p2p/node_sync.py @@ -16,7 +16,7 @@ import struct from collections import OrderedDict from math import inf -from typing import TYPE_CHECKING, Any, Callable, Dict, Generator, Iterator, List, Optional, Tuple +from typing import TYPE_CHECKING, Any, Callable, Generator, Iterator, Optional from weakref import WeakSet from structlog import get_logger @@ -65,8 +65,8 @@ def __init__(self, node_sync: 'NodeSyncTimestamp'): self.is_running: bool 
= False self.is_producing: bool = False - self.queue: OrderedDict[bytes, Tuple[BaseTransaction, List[bytes]]] = OrderedDict() - self.priority_queue: OrderedDict[bytes, Tuple[BaseTransaction, List[bytes]]] = OrderedDict() + self.queue: OrderedDict[bytes, tuple[BaseTransaction, list[bytes]]] = OrderedDict() + self.priority_queue: OrderedDict[bytes, tuple[BaseTransaction, list[bytes]]] = OrderedDict() self.delayed_call: Optional[IDelayedCall] = None @@ -220,7 +220,7 @@ def __init__(self, protocol: 'HathorProtocol', downloader: Downloader, reactor: self.previous_timestamp: int = 0 # Latest deferred waiting for a reply. - self.deferred_by_key: Dict[str, Deferred[Any]] = {} + self.deferred_by_key: dict[str, Deferred[Any]] = {} # Maximum difference between our latest timestamp and synced timestamp to consider # that the peer is synced (in seconds). @@ -248,7 +248,7 @@ def get_status(self): 'synced_timestamp': self.synced_timestamp, } - def get_cmd_dict(self) -> Dict[ProtocolMessages, Callable[[str], None]]: + def get_cmd_dict(self) -> dict[ProtocolMessages, Callable[[str], None]]: """ Return a dict of messages. """ return { diff --git a/hathor/p2p/peer_discovery.py b/hathor/p2p/peer_discovery.py index 099308e6d..8730b7ecb 100644 --- a/hathor/p2p/peer_discovery.py +++ b/hathor/p2p/peer_discovery.py @@ -14,7 +14,7 @@ import socket from abc import ABC, abstractmethod -from typing import Any, Callable, Generator, List, Set, Tuple +from typing import Any, Callable, Generator from structlog import get_logger from twisted.internet import defer @@ -43,7 +43,7 @@ class BootstrapPeerDiscovery(PeerDiscovery): """ It implements a bootstrap peer discovery, which receives a static list of peers. """ - def __init__(self, descriptions: List[str]): + def __init__(self, descriptions: list[str]): """ :param descriptions: Descriptions of peers to connect to. 
""" @@ -60,7 +60,7 @@ class DNSPeerDiscovery(PeerDiscovery): """ It implements a DNS peer discovery, which looks for peers in A, AAA, and TXT records. """ - def __init__(self, hosts: List[str], default_port: int = 40403, test_mode: int = 0): + def __init__(self, hosts: list[str], default_port: int = 40403, test_mode: int = 0): """ :param hosts: List of hosts to be queried :param default_port: Port number which will be used to connect when only IP address is available. @@ -81,7 +81,7 @@ def discover_and_connect(self, connect_to: Callable[[str], None]) -> Generator[A connect_to(url) @inlineCallbacks - def dns_seed_lookup(self, host: str) -> Generator[Any, Any, List[str]]: + def dns_seed_lookup(self, host: str) -> Generator[Any, Any, list[str]]: """ Run a DNS lookup for TXT, A, and AAAA records and return a list of connection strings. """ if self.test_mode: @@ -98,7 +98,7 @@ def dns_seed_lookup(self, host: str) -> Generator[Any, Any, List[str]]: d = defer.gatherResults([d1, d2]) results = yield d - unique_urls: Set[str] = set() + unique_urls: set[str] = set() for urls in results: unique_urls.update(urls) return list(unique_urls) @@ -110,14 +110,14 @@ def errback(self, result): return [] def dns_seed_lookup_text( - self, results: Tuple[List[RRHeader], List[RRHeader], List[RRHeader]] - ) -> List[str]: + self, results: tuple[list[RRHeader], list[RRHeader], list[RRHeader]] + ) -> list[str]: """ Run a DNS lookup for TXT records to discover new peers. The `results` has three lists that contain answer records, authority records, and additional records. 
""" answers, _, _ = results - ret: List[str] = [] + ret: list[str] = [] for record in answers: assert isinstance(record.payload, Record_TXT) for txt in record.payload.data: @@ -127,14 +127,14 @@ def dns_seed_lookup_text( return ret def dns_seed_lookup_address( - self, results: Tuple[List[RRHeader], List[RRHeader], List[RRHeader]] - ) -> List[str]: + self, results: tuple[list[RRHeader], list[RRHeader], list[RRHeader]] + ) -> list[str]: """ Run a DNS lookup for A records to discover new peers. The `results` has three lists that contain answer records, authority records, and additional records. """ answers, _, _ = results - ret: List[str] = [] + ret: list[str] = [] for record in answers: assert isinstance(record.payload, Record_A) address = record.payload.address diff --git a/hathor/p2p/peer_id.py b/hathor/p2p/peer_id.py index 6693a76b7..ad313dde1 100644 --- a/hathor/p2p/peer_id.py +++ b/hathor/p2p/peer_id.py @@ -15,7 +15,7 @@ import base64 import hashlib from enum import Enum -from typing import TYPE_CHECKING, Any, Dict, Generator, List, Optional, Set, cast +from typing import TYPE_CHECKING, Any, Generator, Optional, cast from cryptography import x509 from cryptography.exceptions import InvalidSignature @@ -56,14 +56,14 @@ class PeerId: """ id: Optional[str] - entrypoints: List[str] + entrypoints: list[str] private_key: Optional[rsa.RSAPrivateKeyWithSerialization] public_key: Optional[rsa.RSAPublicKey] certificate: Optional[x509.Certificate] retry_timestamp: int # should only try connecting to this peer after this timestamp retry_interval: int # how long to wait for next connection retry. 
It will double for each failure retry_attempts: int # how many retries were made - flags: Set[str] + flags: set[str] def __init__(self, auto_generate_keys: bool = True) -> None: self.id = None @@ -158,7 +158,7 @@ def verify_signature(self, signature: bytes, data: bytes) -> bool: return True @classmethod - def create_from_json(cls, data: Dict[str, Any]) -> 'PeerId': + def create_from_json(cls, data: dict[str, Any]) -> 'PeerId': """ Create a new PeerId from a JSON. It is used both to load a PeerId from disk and to create a PeerId @@ -213,7 +213,7 @@ def validate(self) -> None: if public_der1 != public_der2: raise InvalidPeerIdException('private/public pair does not match') - def to_json(self, include_private_key: bool = False) -> Dict[str, Any]: + def to_json(self, include_private_key: bool = False) -> dict[str, Any]: """ Return a JSON serialization of the object. By default, it will not include the private key. If you would like to add diff --git a/hathor/p2p/peer_storage.py b/hathor/p2p/peer_storage.py index 5c4880ba9..52131df11 100644 --- a/hathor/p2p/peer_storage.py +++ b/hathor/p2p/peer_storage.py @@ -12,12 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Dict - from hathor.p2p.peer_id import PeerId -class PeerStorage(Dict[str, PeerId]): +class PeerStorage(dict[str, PeerId]): """ PeerStorage is used to store all known peers in memory. It is a dict of peer objects, and peers can be retrieved by their `peer.id`. 
""" diff --git a/hathor/p2p/protocol.py b/hathor/p2p/protocol.py index 2f2a17482..651442b50 100644 --- a/hathor/p2p/protocol.py +++ b/hathor/p2p/protocol.py @@ -14,7 +14,7 @@ import time from enum import Enum -from typing import TYPE_CHECKING, Any, Dict, Generator, Optional, Set, cast +from typing import TYPE_CHECKING, Any, Generator, Optional, cast from structlog import get_logger from twisted.internet.defer import Deferred @@ -82,10 +82,10 @@ class WarningFlags(str, Enum): transport: Optional[ITransport] state: Optional[BaseState] connection_time: float - _state_instances: Dict[PeerState, BaseState] + _state_instances: dict[PeerState, BaseState] connection_string: Optional[str] expected_peer_id: Optional[str] - warning_flags: Set[str] + warning_flags: set[str] aborting: bool diff_timestamp: Optional[int] idle_timeout: int @@ -143,7 +143,7 @@ def __init__(self, network: str, my_peer: PeerId, p2p_manager: 'ConnectionsManag self.expected_peer_id: Optional[str] = None # Set of warning flags that may be added during the connection process - self.warning_flags: Set[str] = set() + self.warning_flags: set[str] = set() # This property is used to indicate the connection is being dropped (either because of a prototcol error or # because the remote disconnected), and the following buffered lines are ignored. @@ -193,7 +193,7 @@ def get_short_peer_id(self) -> Optional[str]: return self.peer.id[:7] return None - def get_logger_context(self) -> Dict[str, Optional[str]]: + def get_logger_context(self) -> dict[str, Optional[str]]: """Return the context for logging.""" return { 'remote': self.get_short_remote(), diff --git a/hathor/p2p/rate_limiter.py b/hathor/p2p/rate_limiter.py index 3d87fb657..f341ffb91 100644 --- a/hathor/p2p/rate_limiter.py +++ b/hathor/p2p/rate_limiter.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import Dict, NamedTuple, Optional +from typing import NamedTuple, Optional from hathor.util import Reactor @@ -27,10 +27,10 @@ class RateLimiter: """ # Stores the keys that are being limited and it's RateLimit - keys: Dict[str, RateLimiterLimit] + keys: dict[str, RateLimiterLimit] # Stores the last hit for each key - hits: Dict[str, RateLimiterLimit] + hits: dict[str, RateLimiterLimit] def __init__(self, reactor: Optional[Reactor] = None): self.keys = {} diff --git a/hathor/p2p/resources/netfilter.py b/hathor/p2p/resources/netfilter.py index 6522965a7..5958976ea 100644 --- a/hathor/p2p/resources/netfilter.py +++ b/hathor/p2p/resources/netfilter.py @@ -13,7 +13,7 @@ # limitations under the License. from json import JSONDecodeError -from typing import TYPE_CHECKING, Any, Dict +from typing import TYPE_CHECKING, Any from hathor.api_util import Resource, get_args, get_missing_params_msg, parse_args, render_options, set_cors from hathor.cli.openapi_files.register import register_resource @@ -36,7 +36,7 @@ from hathor.manager import HathorManager -def handle_body_validation(request: 'Request') -> Dict[str, Any]: +def handle_body_validation(request: 'Request') -> dict[str, Any]: """ Auxiliar method to be used by POST and DELETE requests to handle the parameters validation """ diff --git a/hathor/p2p/states/base.py b/hathor/p2p/states/base.py index 0fa90f215..ee07bc931 100644 --- a/hathor/p2p/states/base.py +++ b/hathor/p2p/states/base.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import TYPE_CHECKING, Callable, Dict, Optional, Union +from typing import TYPE_CHECKING, Callable, Optional, Union from structlog import get_logger from twisted.internet.defer import Deferred @@ -27,7 +27,7 @@ class BaseState: protocol: 'HathorProtocol' - cmd_map: Dict[ProtocolMessages, Union[Callable[[str], None], Callable[[str], Deferred[None]]]] + cmd_map: dict[ProtocolMessages, Union[Callable[[str], None], Callable[[str], Deferred[None]]]] def __init__(self, protocol: 'HathorProtocol'): self.log = logger.new(**protocol.get_logger_context()) diff --git a/hathor/p2p/states/hello.py b/hathor/p2p/states/hello.py index a98670857..bf91756eb 100644 --- a/hathor/p2p/states/hello.py +++ b/hathor/p2p/states/hello.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import TYPE_CHECKING, Any, Dict, Set +from typing import TYPE_CHECKING, Any from structlog import get_logger @@ -44,7 +44,7 @@ def __init__(self, protocol: 'HathorProtocol') -> None: def _app(self) -> str: return f'Hathor v{hathor.__version__}' - def _get_hello_data(self) -> Dict[str, Any]: + def _get_hello_data(self) -> dict[str, Any]: """ Returns a dict with information about this node that will be sent to a peer. 
""" @@ -64,7 +64,7 @@ def _get_hello_data(self) -> Dict[str, Any]: data['sync_versions'] = [x.value for x in self._get_sync_versions()] return data - def _get_sync_versions(self) -> Set[SyncVersion]: + def _get_sync_versions(self) -> set[SyncVersion]: """Shortcut to ConnectionManager.get_sync_versions""" connections_manager = self.protocol.connections assert connections_manager is not None @@ -168,7 +168,7 @@ def handle_hello(self, payload: str) -> None: protocol.change_state(protocol.PeerState.PEER_ID) -def _parse_sync_versions(hello_data: Dict[str, Any]) -> Set[SyncVersion]: +def _parse_sync_versions(hello_data: dict[str, Any]) -> set[SyncVersion]: """Versions that are not recognized will not be included.""" if settings.CAPABILITY_SYNC_VERSION in hello_data['capabilities']: if 'sync_versions' not in hello_data: diff --git a/hathor/p2p/sync_manager.py b/hathor/p2p/sync_manager.py index 25672bed6..cc6f2b141 100644 --- a/hathor/p2p/sync_manager.py +++ b/hathor/p2p/sync_manager.py @@ -13,7 +13,7 @@ # limitations under the License. from abc import ABC, abstractmethod -from typing import Callable, Dict +from typing import Callable from hathor.p2p.messages import ProtocolMessages from hathor.transaction import BaseTransaction @@ -36,7 +36,7 @@ def stop(self) -> None: raise NotImplementedError @abstractmethod - def get_cmd_dict(self) -> Dict[ProtocolMessages, Callable[[str], None]]: + def get_cmd_dict(self) -> dict[ProtocolMessages, Callable[[str], None]]: """Command dict to add to the protocol handler""" raise NotImplementedError diff --git a/hathor/p2p/utils.py b/hathor/p2p/utils.py index 872be55a4..e9c778807 100644 --- a/hathor/p2p/utils.py +++ b/hathor/p2p/utils.py @@ -13,7 +13,7 @@ # limitations under the License. 
import datetime -from typing import Any, Dict, Generator, List, Optional, Set, Tuple +from typing import Any, Generator, Optional from urllib.parse import parse_qs, urlparse import requests @@ -53,7 +53,7 @@ def discover_ip_ipify() -> Optional[str]: return None -def description_to_connection_string(description: str) -> Tuple[str, Optional[str]]: +def description_to_connection_string(description: str) -> tuple[str, Optional[str]]: """ The description returned from DNS query may contain a peer-id parameter This method splits this description into the connection URL and the peer-id (in case it exists) Expected description is something like: tcp://127.0.0.1:40403/?id=123 @@ -78,7 +78,7 @@ def get_genesis_short_hash() -> str: return GENESIS_HASH.hex()[:7] -def get_settings_hello_dict() -> Dict[str, Any]: +def get_settings_hello_dict() -> dict[str, Any]: """ Return a dict of settings values that must be validated in the hello state """ settings_dict = {} @@ -99,7 +99,7 @@ def connection_string_to_host(connection_string: str) -> str: @inlineCallbacks -def discover_dns(host: str, test_mode: int = 0) -> Generator[Any, Any, List[str]]: +def discover_dns(host: str, test_mode: int = 0) -> Generator[Any, Any, list[str]]: """ Start a DNS peer discovery object and execute a search for the host Returns the DNS string from the requested host @@ -151,7 +151,7 @@ def generate_certificate(private_key: RSAPrivateKey, ca_file: str, ca_pkey_file: return certificate -def parse_file(text: str, *, header: Optional[str] = None) -> List[str]: +def parse_file(text: str, *, header: Optional[str] = None) -> list[str]: """Parses a list of strings.""" if header is None: header = 'hathor-whitelist' @@ -164,7 +164,7 @@ def parse_file(text: str, *, header: Optional[str] = None) -> List[str]: return list(nonblank_lines) -def parse_whitelist(text: str, *, header: Optional[str] = None) -> Set[str]: +def parse_whitelist(text: str, *, header: Optional[str] = None) -> set[str]: """ Parses the list of 
whitelist peer ids Example: diff --git a/hathor/profiler/cpu.py b/hathor/profiler/cpu.py index 25fb1f5ab..fde33ed7f 100644 --- a/hathor/profiler/cpu.py +++ b/hathor/profiler/cpu.py @@ -15,11 +15,11 @@ import time from collections import defaultdict from functools import wraps -from typing import Any, Callable, DefaultDict, List, Tuple, Union +from typing import Any, Callable, Union from twisted.internet.task import LoopingCall -Key = Tuple[str, ...] +Key = tuple[str, ...] class ProcItem: @@ -55,13 +55,13 @@ def __init__(self, *, update_interval: float = 3.0, expiry: float = 15.0): """ # Store the measures for each key. - self.measures: DefaultDict[Key, ProcItem] = defaultdict(ProcItem) + self.measures: defaultdict[Key, ProcItem] = defaultdict(ProcItem) # Error message if something goes wrong. self.error: str = '' # Stack of the current sequence of markers. - self.stack: List[Tuple[str, float]] = [] + self.stack: list[tuple[str, float]] = [] # Wall time when update was last called. self.last_update = time.time() @@ -70,7 +70,7 @@ def __init__(self, *, update_interval: float = 3.0, expiry: float = 15.0): self.last_process_time = 0.0 # List of processes and their data. It is the output of the profiler. - self.proc_list: List[Tuple[Key, ProcItem]] = [] + self.proc_list: list[tuple[Key, ProcItem]] = [] # Timer to call `self.update()` periodically. self.lc_update = LoopingCall(self.update) @@ -111,7 +111,7 @@ def stop(self) -> None: self.enabled = False self.lc_update.stop() - def get_proc_list(self) -> List[Tuple[Key, ProcItem]]: + def get_proc_list(self) -> list[tuple[Key, ProcItem]]: """Return the process list.""" return self.proc_list @@ -158,7 +158,7 @@ def update(self) -> None: ptime = time.process_time() interval = ptime - self.last_process_time - proc_list: List[Tuple[Key, ProcItem]] = [] + proc_list: list[tuple[Key, ProcItem]] = [] # Update keys. 
keys_to_remove = set() diff --git a/hathor/prometheus.py b/hathor/prometheus.py index 1bae1c59d..8b3bd51b8 100644 --- a/hathor/prometheus.py +++ b/hathor/prometheus.py @@ -13,7 +13,7 @@ # limitations under the License. import os -from typing import TYPE_CHECKING, Dict +from typing import TYPE_CHECKING from prometheus_client import CollectorRegistry, Gauge, write_to_textfile from twisted.internet.task import LoopingCall @@ -89,8 +89,8 @@ def __init__(self, metrics: 'Metrics', path: str, filename: str = 'hathor.prom', self.filepath: str = os.path.join(path, filename) # Stores all Gauge objects for each metric (key is the metric name) - # Dict[str, prometheus_client.Gauge] - self.metric_gauges: Dict[str, Gauge] = {} + # dict[str, prometheus_client.Gauge] + self.metric_gauges: dict[str, Gauge] = {} # Setup initial prometheus lib objects for each metric self._initial_setup() diff --git a/hathor/pubsub.py b/hathor/pubsub.py index 0a0e0153e..2c03190d0 100644 --- a/hathor/pubsub.py +++ b/hathor/pubsub.py @@ -14,7 +14,7 @@ from collections import defaultdict, deque from enum import Enum -from typing import TYPE_CHECKING, Any, Callable, Deque, Dict, List, Tuple, cast +from typing import TYPE_CHECKING, Any, Callable, cast from twisted.internet.interfaces import IReactorFromThreads @@ -161,11 +161,11 @@ class PubSubManager: It is used to let independent objects respond to events. 
""" - _subscribers: Dict[HathorEvents, List[PubSubCallable]] + _subscribers: dict[HathorEvents, list[PubSubCallable]] def __init__(self, reactor: Reactor) -> None: self._subscribers = defaultdict(list) - self.queue: Deque[Tuple[PubSubCallable, HathorEvents, EventArguments]] = deque() + self.queue: deque[tuple[PubSubCallable, HathorEvents, EventArguments]] = deque() self.reactor = reactor def subscribe(self, key: HathorEvents, fn: PubSubCallable) -> None: diff --git a/hathor/simulator/clock.py b/hathor/simulator/clock.py index 2180f2c7b..db5f466e4 100644 --- a/hathor/simulator/clock.py +++ b/hathor/simulator/clock.py @@ -13,7 +13,7 @@ # limitations under the License. import heapq -from typing import Any, Callable, List +from typing import Any, Callable from twisted.internet.base import DelayedCall from twisted.internet.interfaces import IDelayedCall, IReactorTime @@ -56,7 +56,7 @@ def callLater(self, delay: float, callable: Callable[..., Any], *args: object, * heapq.heappush(self.calls, (dc.getTime(), dc)) return dc - def getDelayedCalls(self) -> List[IDelayedCall]: + def getDelayedCalls(self) -> list[IDelayedCall]: """ See L{twisted.internet.interfaces.IReactorTime.getDelayedCalls} """ diff --git a/hathor/simulator/fake_connection.py b/hathor/simulator/fake_connection.py index a292d8510..2bb061a0d 100644 --- a/hathor/simulator/fake_connection.py +++ b/hathor/simulator/fake_connection.py @@ -13,7 +13,7 @@ # limitations under the License. 
from collections import deque -from typing import TYPE_CHECKING, Deque, Optional +from typing import TYPE_CHECKING, Optional from OpenSSL.crypto import X509 from structlog import get_logger @@ -60,8 +60,8 @@ def __init__(self, manager1: 'HathorManager', manager2: 'HathorManager', *, late self.tr2 = HathorStringTransport(self._proto1.my_peer) self._do_buffering = True - self._buf1: Deque[str] = deque() - self._buf2: Deque[str] = deque() + self._buf1: deque[str] = deque() + self._buf2: deque[str] = deque() self._proto1.makeConnection(self.tr1) self._proto2.makeConnection(self.tr2) diff --git a/hathor/simulator/miner/dummy_miner.py b/hathor/simulator/miner/dummy_miner.py index a1207f19e..a2bdd67de 100644 --- a/hathor/simulator/miner/dummy_miner.py +++ b/hathor/simulator/miner/dummy_miner.py @@ -12,8 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import List - from structlog import get_logger from hathor.conf import HathorSettings @@ -31,9 +29,9 @@ class DummyMiner(AbstractMiner): """Simulate blocks mined at pre-determined times.""" _start_time: int - blocks: List[Block] = [] + blocks: list[Block] = [] - def __init__(self, manager: HathorManager, rng: Random, *, block_times: List[int]): + def __init__(self, manager: HathorManager, rng: Random, *, block_times: list[int]): """ Creates a DummyMiner where each block will be generated at the times specified in block_times, in absolute seconds from the manager's clock. 
@@ -43,7 +41,7 @@ def __init__(self, manager: HathorManager, rng: Random, *, block_times: List[int """ super().__init__(manager, rng) - self._block_times: List[int] = block_times + self._block_times: list[int] = block_times def start(self) -> None: self._start_time = int(self._clock.seconds()) diff --git a/hathor/simulator/simulator.py b/hathor/simulator/simulator.py index a4d60744b..b17e0a074 100644 --- a/hathor/simulator/simulator.py +++ b/hathor/simulator/simulator.py @@ -15,7 +15,7 @@ import secrets import time from collections import OrderedDict -from typing import TYPE_CHECKING, Any, Generator, List, Optional +from typing import TYPE_CHECKING, Any, Generator, Optional from mnemonic import Mnemonic from structlog import get_logger @@ -120,7 +120,7 @@ def __init__(self, seed: Optional[int] = None): self._network = 'testnet' self._clock = HeapClock() self._peers: OrderedDict[str, HathorManager] = OrderedDict() - self._connections: List['FakeConnection'] = [] + self._connections: list['FakeConnection'] = [] self._started = False def start(self) -> None: diff --git a/hathor/simulator/tx_generator.py b/hathor/simulator/tx_generator.py index aa14d8888..27041ac3c 100644 --- a/hathor/simulator/tx_generator.py +++ b/hathor/simulator/tx_generator.py @@ -13,7 +13,7 @@ # limitations under the License. from collections import deque -from typing import TYPE_CHECKING, Deque, List +from typing import TYPE_CHECKING from structlog import get_logger @@ -49,7 +49,7 @@ def __init__(self, manager: 'HathorManager', rng: Random, *, # List of addresses to send tokens. If this list is empty, tokens will be sent to an address # of its own wallet. - self.send_to: List[str] = [] + self.send_to: list[str] = [] self.clock = manager.reactor self.rate = rate @@ -63,7 +63,7 @@ def __init__(self, manager: 'HathorManager', rng: Random, *, # Most recent transactions generated here. # The lowest index has the most recent transaction. 
self.transactions_found: int = 0 - self.latest_transactions: Deque[Transaction] = deque() + self.latest_transactions: deque[Transaction] = deque() self.double_spending_only = False diff --git a/hathor/storage/rocksdb_storage.py b/hathor/storage/rocksdb_storage.py index 6e9f4978f..232a2ec71 100644 --- a/hathor/storage/rocksdb_storage.py +++ b/hathor/storage/rocksdb_storage.py @@ -13,7 +13,7 @@ # limitations under the License. import os -from typing import TYPE_CHECKING, List, Optional +from typing import TYPE_CHECKING, Optional if TYPE_CHECKING: import rocksdb @@ -44,7 +44,7 @@ def __init__(self, path: str = './', cache_capacity: Optional[int] = None): allow_mmap_reads=True, # default is already True ) - cf_names: List[bytes] + cf_names: list[bytes] try: # get the list of existing column families cf_names = rocksdb.list_column_families(db_path, options) diff --git a/hathor/stratum/stratum.py b/hathor/stratum/stratum.py index c7acb0c6a..2c47f1e5c 100644 --- a/hathor/stratum/stratum.py +++ b/hathor/stratum/stratum.py @@ -22,7 +22,7 @@ from os import cpu_count from string import hexdigits from time import sleep -from typing import TYPE_CHECKING, Any, Callable, Dict, Iterator, List, NamedTuple, Optional, Set, Tuple, Union, cast +from typing import TYPE_CHECKING, Any, Callable, Iterator, NamedTuple, Optional, Union, cast from uuid import UUID, uuid4 from structlog import get_logger @@ -126,7 +126,7 @@ class MinerJob(NamedTuple): nonce_size: Any = Value('I') weight: Any = Value('d') - def update_job(self, params: Dict[str, Any]) -> bool: + def update_job(self, params: dict[str, Any]) -> bool: """ Updates job variables shared between processes. 
Should contain the following params: @@ -138,7 +138,7 @@ def update_job(self, params: Dict[str, Any]) -> bool: } :param params: Hathor Stratum job method request params - :type params: Dict + :type params: dict :return: True if the update is sucessful :rtype: bool @@ -225,14 +225,14 @@ def lineReceived(self, line: bytes) -> None: }) @abstractmethod - def handle_request(self, method: str, params: Optional[Union[List, Dict]], msgid: Optional[str]) -> None: + def handle_request(self, method: str, params: Optional[Union[list, dict]], msgid: Optional[str]) -> None: """ Handles any valid request. :param method: JSON-RPC 2.0 request method :type method: str :param params: JSON-RPC 2.0 request params - :type params: Optional[Union[List, Dict]] + :type params: Optional[Union[list, dict]] :param msgid: JSON-RPC 2.0 message id :type msgid: Optional[str] @@ -252,18 +252,18 @@ def handle_result(self, result: Any, msgid: Optional[str]) -> None: raise NotImplementedError @abstractmethod - def handle_error(self, error: Dict, data: Any, msgid: Optional[str]) -> None: + def handle_error(self, error: dict, data: Any, msgid: Optional[str]) -> None: """ Handles any valid error. :param error: JSON-RPC 2.0 error message - :type error: Dict + :type error: dict :param msgid: JSON-RPC 2.0 message id :type msgid: Optional[UUID] """ raise NotImplementedError - def send_request(self, method: str, params: Optional[Union[List, Dict]], msgid: Union[str, int, None] = None, + def send_request(self, method: str, params: Optional[Union[list, dict]], msgid: Union[str, int, None] = None, ok: Optional[bool] = None) -> None: """ Sends a JSON-RPC 2.0 request. 
@@ -271,12 +271,12 @@ def send_request(self, method: str, params: Optional[Union[List, Dict]], msgid: :type method: str :param params: JSON-RPC 2.0 request params - :type params: Optional[Union[List, Dict]] + :type params: Optional[Union[list, dict]] :param msgid: JSON-RPC 2.0 message id :type msgid: Optional[UUID] """ - data: Dict[str, Any] = {'method': method, 'params': params} + data: dict[str, Any] = {'method': method, 'params': params} self.log.debug('send request', method=method, params=params) # XXX: keeping the same msgid type the client sent data['id'] = msgid @@ -299,11 +299,11 @@ def send_result(self, result: Any, msgid: Optional[str]) -> None: self.log.debug('send result', data=data) return self.send_json(data) - def send_error(self, error: Dict, msgid: Optional[str] = None, data: Any = None) -> None: + def send_error(self, error: dict, msgid: Optional[str] = None, data: Any = None) -> None: """ Sends a JSON-RPC 2.0 error. :param error: JSON-RPC 2.0 error message - :type error: Dict + :type error: dict :param msgid: JSON-RPC 2.0 message id :type msgid: Optional[UUID] @@ -318,11 +318,11 @@ def send_error(self, error: Dict, msgid: Optional[str] = None, data: Any = None) if error['code'] <= UNRECOVERABLE_ERROR_CODE_MAX and self.transport is not None: self.transport.loseConnection() - def send_json(self, json: Dict) -> None: + def send_json(self, json: dict) -> None: """ Encodes a JSON and send it through the LineReceiver interface. 
:param json: JSON-RPC 2.0 message - :type json: Dict + :type json: dict """ try: message = json_dumpb(json) @@ -350,8 +350,8 @@ class StratumProtocol(JSONRPC): address: IAddress current_job: Optional[ServerJob] - jobs: Dict[UUID, ServerJob] - job_ids: List[UUID] + jobs: dict[UUID, ServerJob] + job_ids: list[UUID] factory: 'StratumFactory' manager: 'HathorManager' miner_id: Optional[UUID] @@ -398,14 +398,14 @@ def connectionLost(self, reason: Failure = connectionDone) -> None: assert self.miner_id is not None self.factory.miner_protocols.pop(self.miner_id, None) - def handle_request(self, method: str, params: Optional[Union[List, Dict]], msgid: Optional[str]) -> None: + def handle_request(self, method: str, params: Optional[Union[list, dict]], msgid: Optional[str]) -> None: """ Handles subscribe and submit requests. :param method: JSON-RPC 2.0 request method :type method: str :param params: JSON-RPC 2.0 request params - :type params: Optional[Union[List, Dict]] + :type params: Optional[Union[list, dict]] :param msgid: JSON-RPC 2.0 message id :type msgid: Optional[str] @@ -417,10 +417,10 @@ def handle_request(self, method: str, params: Optional[Union[List, Dict]], msgid return self.send_error(NODE_SYNCING, msgid) if method in ['mining.subscribe', 'subscribe']: - params = cast(Dict, params) + params = cast(dict, params) return self.handle_subscribe(params, msgid) if method in ['mining.submit', 'submit']: - params = cast(Dict, params) + params = cast(dict, params) return self.handle_submit(params, msgid) self.send_error(METHOD_NOT_FOUND, msgid, data={'method': method, 'supported_methods': ['submit', 'subscribe']}) @@ -429,11 +429,11 @@ def handle_result(self, result: Any, msgid: Optional[str]) -> None: """ Logs any result since there are not supposed to be any """ self.log.debug('handle result', msgid=msgid, result=result) - def handle_error(self, error: Dict, data: Any, msgid: Optional[str]) -> None: + def handle_error(self, error: dict, data: Any, msgid: 
Optional[str]) -> None: """ Logs any errors since there are not supposed to be any """ self.log.error('handle error', msgid=msgid, error=error) - def handle_subscribe(self, params: Dict, msgid: Optional[str]) -> None: + def handle_subscribe(self, params: dict, msgid: Optional[str]) -> None: """ Handles subscribe request by answering it and triggering a job request. :param msgid: JSON-RPC 2.0 message id @@ -471,11 +471,11 @@ def handle_subscribe(self, params: Dict, msgid: Optional[str]) -> None: self.subscribed = True self.job_request() - def handle_submit(self, params: Dict, msgid: Optional[str]) -> None: + def handle_submit(self, params: dict, msgid: Optional[str]) -> None: """ Handles submit request by validating and propagating the result :param params: a dict containing a valid uui4 hex as `job_id` and a valid transaction nonce as `nonce` - :type params: Dict + :type params: dict :param msgid: JSON-RPC 2.0 message id :type msgid: Optional[UUID] @@ -723,13 +723,13 @@ class StratumFactory(Factory): Interfaces with nodes to keep mining jobs up to date and to submit successful ones. 
""" reactor: Reactor - jobs: Set[UUID] + jobs: set[UUID] manager: 'HathorManager' - miner_protocols: Dict[UUID, StratumProtocol] - tx_queue: List[bytes] - mining_tx_pool: Dict[bytes, BaseTransaction] - mined_txs: Dict[bytes, Transaction] - deferreds_tx: Dict[bytes, Deferred] + miner_protocols: dict[UUID, StratumProtocol] + tx_queue: list[bytes] + mining_tx_pool: dict[bytes, BaseTransaction] + mined_txs: dict[bytes, Transaction] + deferreds_tx: dict[bytes, Deferred] def __init__(self, manager: 'HathorManager', reactor: Reactor = reactor): self.log = logger.new() @@ -790,10 +790,10 @@ def get_current_timestamp(self) -> int: """ return int(self.reactor.seconds()) - def get_stats(self) -> List[MinerStatistics]: + def get_stats(self) -> list[MinerStatistics]: return [protocol.get_stats() for protocol in self.miner_protocols.values()] - def get_stats_resource(self) -> List[Dict]: + def get_stats_resource(self) -> list[dict]: return [stat._asdict() for stat in self.get_stats()] @@ -812,8 +812,8 @@ class StratumClient(JSONRPC): queue: MQueue proc_count: Optional[int] - job: Dict - miners: List[Process] + job: dict + miners: list[Process] loop: Optional[task.LoopingCall] signal: Any job_data: MinerJob @@ -870,14 +870,14 @@ def stop(self) -> None: def connectionMade(self) -> None: self.send_request('subscribe', {'address': self.address}, self._next_id()) - def handle_request(self, method: str, params: Optional[Union[List, Dict]], msgid: Optional[str]) -> None: + def handle_request(self, method: str, params: Optional[Union[list, dict]], msgid: Optional[str]) -> None: """ Handles job requests. 
:param method: JSON-RPC 2.0 request method :type method: str :param params: Hathor Stratum job request params - :type params: Dict + :type params: dict :param msgid: JSON-RPC 2.0 message id :type msgid: Optional[str] @@ -894,7 +894,7 @@ def handle_result(self, result: Any, msgid: Optional[str]) -> None: """ Logs any result since there are not supposed to be any """ self.log.debug('handle result', result=result) - def handle_error(self, error: Dict, data: Any, msgid: Optional[str]) -> None: + def handle_error(self, error: dict, data: Any, msgid: Optional[str]) -> None: """ Logs any error since there are not supposed to be any """ self.log.warn('handle_error', error=error, data=data) @@ -919,7 +919,7 @@ def miner_job(index: int, process_num: int, job_data: MinerJob, signal: 'ctypes. :param queue: queue used to submit solutions to supervisor process :type queue: MQueue """ - def update_job() -> Tuple[bytes, int, Any, int, int]: + def update_job() -> tuple[bytes, int, Any, int, int]: while signal.value == StratumClient.SLEEP: sleep(StratumClient.NAP_DURATION) return ( diff --git a/hathor/sysctl/p2p/manager.py b/hathor/sysctl/p2p/manager.py index 880f201ca..ed2d4f606 100644 --- a/hathor/sysctl/p2p/manager.py +++ b/hathor/sysctl/p2p/manager.py @@ -13,16 +13,15 @@ # limitations under the License. 
import os -from typing import List, Tuple from hathor.p2p.manager import ConnectionsManager from hathor.sysctl.exception import SysctlException from hathor.sysctl.sysctl import Sysctl -def parse_text(text: str) -> List[str]: +def parse_text(text: str) -> list[str]: """Parse text per line skipping empty lines and comments.""" - ret: List[str] = [] + ret: list[str] = [] for line in text.splitlines(): line = line.strip() if not line: @@ -73,7 +72,7 @@ def set_force_sync_rotate(self) -> None: """Force a sync rotate.""" self.connections._sync_rotate_if_needed(force=True) - def get_global_send_tips_rate_limit(self) -> Tuple[int, float]: + def get_global_send_tips_rate_limit(self) -> tuple[int, float]: """Return the global rate limiter for SEND_TIPS.""" limit = self.connections.rate_limiter.get_limit(self.connections.GlobalRateLimiter.SEND_TIPS) if limit is None: @@ -106,11 +105,11 @@ def set_lc_sync_update_interval(self, value: float) -> None: self.connections.lc_sync_update.stop() self.connections.lc_sync_update.start(self.connections.lc_sync_update_interval, now=False) - def get_always_enable_sync(self) -> List[str]: + def get_always_enable_sync(self) -> list[str]: """Return the list of sync-always-enabled peers.""" return list(self.connections.always_enable_sync) - def set_always_enable_sync(self, values: List[str]) -> None: + def set_always_enable_sync(self, values: list[str]) -> None: """Change the list of sync-always-enabled peers.""" self.connections.set_always_enable_sync(values) @@ -118,7 +117,7 @@ def set_always_enable_sync_readtxt(self, file_path: str) -> None: """Update the list of sync-always-enabled peers from a file.""" if not os.path.isfile(file_path): raise SysctlException(f'file not found: {file_path}') - values: List[str] + values: list[str] with open(file_path, 'r') as fp: values = parse_text(fp.read()) self.connections.set_always_enable_sync(values) diff --git a/hathor/sysctl/protocol.py b/hathor/sysctl/protocol.py index c84b10900..ac60965f2 100644 
--- a/hathor/sysctl/protocol.py +++ b/hathor/sysctl/protocol.py @@ -14,7 +14,7 @@ import inspect import json -from typing import TYPE_CHECKING, Any, Callable, List, Optional +from typing import TYPE_CHECKING, Any, Callable, Optional from pydantic import ValidationError from twisted.protocols.basic import LineReceiver @@ -105,7 +105,7 @@ def help(self, path: str) -> None: self.sendError(f'{path} not found') return - output: List[str] = [] + output: list[str] = [] output.extend(self._get_method_help('getter', cmd.getter)) output.append('') output.extend(self._get_method_help('setter', cmd.setter)) @@ -135,12 +135,12 @@ def _deserialize(self, value_str: str) -> Any: return tuple(json.loads(x) for x in parts) return json.loads(value_str) - def _get_method_help(self, method_name: str, method: Optional[Callable]) -> List[str]: + def _get_method_help(self, method_name: str, method: Optional[Callable]) -> list[str]: """Return a list of strings with the help for `method`.""" if method is None: return [f'{method_name}: not available'] - output: List[str] = [] + output: list[str] = [] doc: str = inspect.getdoc(method) or '(no help found)' signature = inspect.signature(method) output.append(f'{method_name}{signature}:') diff --git a/hathor/sysctl/sysctl.py b/hathor/sysctl/sysctl.py index a2f40a778..f9a805af8 100644 --- a/hathor/sysctl/sysctl.py +++ b/hathor/sysctl/sysctl.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import Any, Callable, Dict, Iterator, NamedTuple, Optional, Tuple +from typing import Any, Callable, Iterator, NamedTuple, Optional from pydantic import validate_arguments @@ -31,8 +31,8 @@ class Sysctl: """A node in the sysctl tree.""" def __init__(self) -> None: - self._children: Dict[str, 'Sysctl'] = {} - self._commands: Dict[str, SysctlCommand] = {} + self._children: dict[str, 'Sysctl'] = {} + self._commands: dict[str, SysctlCommand] = {} def put_child(self, path: str, sysctl: 'Sysctl') -> None: """Add a child to the tree.""" @@ -93,7 +93,7 @@ def path_join(self, p1: str, p2: str) -> str: return p2 return f'{p1}.{p2}' - def get_all(self, prefix: str = '') -> Iterator[Tuple[str, Any]]: + def get_all(self, prefix: str = '') -> Iterator[tuple[str, Any]]: """Return all paths and values, usually for backup.""" for path, child in self._children.items(): yield from child.get_all(self.path_join(prefix, path)) diff --git a/hathor/transaction/aux_pow.py b/hathor/transaction/aux_pow.py index 978cccef0..795d9a9f7 100644 --- a/hathor/transaction/aux_pow.py +++ b/hathor/transaction/aux_pow.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import List, NamedTuple +from typing import NamedTuple from structlog import get_logger @@ -26,7 +26,7 @@ class BitcoinAuxPow(NamedTuple): header_head: bytes # 36 bytes coinbase_head: bytes # variable length (at least 47 bytes) coinbase_tail: bytes # variable length (at least 18 bytes) - merkle_path: List[bytes] # each element has 32 bytes + merkle_path: list[bytes] # each element has 32 bytes header_tail: bytes # 12 bytes @classmethod diff --git a/hathor/transaction/base_transaction.py b/hathor/transaction/base_transaction.py index 3b892cd5a..7ff003d69 100644 --- a/hathor/transaction/base_transaction.py +++ b/hathor/transaction/base_transaction.py @@ -22,7 +22,7 @@ from itertools import chain from math import inf, isfinite, log from struct import error as StructError, pack -from typing import TYPE_CHECKING, Any, Callable, ClassVar, Dict, Iterator, List, Optional, Set, Tuple, Type +from typing import TYPE_CHECKING, Any, Callable, ClassVar, Iterator, Optional from structlog import get_logger @@ -120,13 +120,13 @@ def _missing_(cls, value: Any) -> None: raise ValueError(f'Invalid version: {value}') - def get_cls(self) -> Type['BaseTransaction']: + def get_cls(self) -> type['BaseTransaction']: from hathor.transaction.block import Block from hathor.transaction.merge_mined_block import MergeMinedBlock from hathor.transaction.token_creation_tx import TokenCreationTransaction from hathor.transaction.transaction import Transaction - cls_map: Dict[TxVersion, Type[BaseTransaction]] = { + cls_map: dict[TxVersion, type[BaseTransaction]] = { TxVersion.REGULAR_BLOCK: Block, TxVersion.REGULAR_TRANSACTION: Transaction, TxVersion.TOKEN_CREATION_TRANSACTION: TokenCreationTransaction, @@ -167,9 +167,9 @@ def __init__(self, signal_bits: int = 0, version: int = TxVersion.REGULAR_BLOCK, weight: float = 0, - inputs: Optional[List['TxInput']] = None, - outputs: Optional[List['TxOutput']] = None, - parents: Optional[List[VertexId]] = None, + inputs: Optional[list['TxInput']] = 
None, + outputs: Optional[list['TxOutput']] = None, + parents: Optional[list[VertexId]] = None, hash: Optional[VertexId] = None, storage: Optional['TransactionStorage'] = None) -> None: """ @@ -203,7 +203,7 @@ def log(cls): """ return _base_transaction_log - def _get_formatted_fields_dict(self, short: bool = True) -> Dict[str, str]: + def _get_formatted_fields_dict(self, short: bool = True) -> dict[str, str]: """ Used internally on __repr__ and __str__, returns a dict of `field_name: formatted_value`. """ from collections import OrderedDict @@ -435,26 +435,26 @@ def get_struct(self) -> bytes: struct_bytes += self.get_struct_nonce() return struct_bytes - def get_all_dependencies(self) -> Set[bytes]: + def get_all_dependencies(self) -> set[bytes]: """Set of all tx-hashes needed to fully validate this tx, including parent blocks/txs and inputs.""" return set(chain(self.parents, (i.tx_id for i in self.inputs))) - def get_tx_dependencies(self) -> Set[bytes]: + def get_tx_dependencies(self) -> set[bytes]: """Set of all tx-hashes needed to fully validate this, except for block parent, i.e. only tx parents/inputs.""" parents = self.parents[1:] if self.is_block else self.parents return set(chain(parents, (i.tx_id for i in self.inputs))) - def get_tx_parents(self) -> Set[bytes]: + def get_tx_parents(self) -> set[bytes]: """Set of parent tx hashes, typically used for syncing transactions.""" return set(self.parents[1:] if self.is_block else self.parents) - def get_related_addresses(self) -> Set[str]: + def get_related_addresses(self) -> set[str]: """ Return a set of addresses collected from tx's inputs and outputs. 
""" from hathor.transaction.scripts import parse_address_script assert self.storage is not None - addresses: Set[str] = set() + addresses: set[str] = set() def add_address_from_output(output: 'TxOutput') -> None: script_type_out = parse_address_script(output.script) @@ -507,7 +507,7 @@ def set_validation(self, validation: ValidationState) -> None: else: self._mark_partially_validated() - def validate_checkpoint(self, checkpoints: List[Checkpoint]) -> bool: + def validate_checkpoint(self, checkpoints: list[Checkpoint]) -> bool: """ Run checkpoint validations and update the validation state. If no exception is raised, the ValidationState will end up as `CHECKPOINT` and return `True`. @@ -572,7 +572,7 @@ def _unmark_partially_validated(self) -> None: tx_meta.del_voided_by(settings.PARTIALLY_VALIDATED_ID) @abstractmethod - def verify_checkpoint(self, checkpoints: List[Checkpoint]) -> None: + def verify_checkpoint(self, checkpoints: list[Checkpoint]) -> None: """Check that this tx is a known checkpoint or is parent of another checkpoint-valid tx/block. To be implemented by tx/block, used by `self.validate_checkpoint`. 
Should not modify the validation state.""" @@ -998,10 +998,10 @@ def get_spent_tx(self, input_tx: 'TxInput') -> 'BaseTransaction': assert self.storage is not None return self.storage.get_transaction(input_tx.tx_id) - def to_json(self, decode_script: bool = False, include_metadata: bool = False) -> Dict[str, Any]: - """ Creates a json serializable Dict object from self + def to_json(self, decode_script: bool = False, include_metadata: bool = False) -> dict[str, Any]: + """ Creates a json serializable dict object from self """ - data: Dict[str, Any] = {} + data: dict[str, Any] = {} data['hash'] = self.hash_hex or None data['nonce'] = self.nonce data['timestamp'] = self.timestamp @@ -1014,7 +1014,7 @@ def to_json(self, decode_script: bool = False, include_metadata: bool = False) - data['inputs'] = [] for tx_input in self.inputs: - data_input: Dict[str, Any] = {} + data_input: dict[str, Any] = {} data_input['tx_id'] = tx_input.tx_id.hex() data_input['index'] = tx_input.index data_input['data'] = base64.b64encode(tx_input.data).decode('utf-8') @@ -1029,11 +1029,11 @@ def to_json(self, decode_script: bool = False, include_metadata: bool = False) - return data - def to_json_extended(self) -> Dict[str, Any]: + def to_json_extended(self) -> dict[str, Any]: assert self.hash is not None assert self.storage is not None - def serialize_output(tx: BaseTransaction, tx_out: TxOutput) -> Dict[str, Any]: + def serialize_output(tx: BaseTransaction, tx_out: TxOutput) -> dict[str, Any]: data = tx_out.to_json(decode_script=True) data['token'] = tx.get_token_uid(tx_out.get_token_index()).hex() data['decoded'].pop('token_data', None) @@ -1041,7 +1041,7 @@ def serialize_output(tx: BaseTransaction, tx_out: TxOutput) -> Dict[str, Any]: return data meta = self.get_metadata() - ret: Dict[str, Any] = { + ret: dict[str, Any] = { 'tx_id': self.hash_hex, 'version': int(self.version), 'weight': self.weight, @@ -1075,7 +1075,7 @@ def serialize_output(tx: BaseTransaction, tx_out: TxOutput) -> 
Dict[str, Any]: return ret - def validate_tx_error(self) -> Tuple[bool, str]: + def validate_tx_error(self) -> tuple[bool, str]: """ Verify if tx is valid and return success and possible error message :return: Success if tx is valid and possible error message, if not @@ -1166,7 +1166,7 @@ def get_sighash_bytes(self) -> bytes: return bytes(ret) @classmethod - def create_from_bytes(cls, buf: bytes, *, verbose: VerboseCallback = None) -> Tuple['TxInput', bytes]: + def create_from_bytes(cls, buf: bytes, *, verbose: VerboseCallback = None) -> tuple['TxInput', bytes]: """ Creates a TxInput from a serialized input. Returns the input and remaining bytes """ @@ -1184,7 +1184,7 @@ def create_from_bytes(cls, buf: bytes, *, verbose: VerboseCallback = None) -> Tu return txin, buf @classmethod - def create_from_dict(cls, data: Dict) -> 'TxInput': + def create_from_dict(cls, data: dict) -> 'TxInput': """ Creates a TxInput from a human readable dict.""" return cls( bytes.fromhex(data['tx_id']), @@ -1192,10 +1192,10 @@ def create_from_dict(cls, data: Dict) -> 'TxInput': base64.b64decode(data['data']) if data.get('data') else b'', ) - def to_human_readable(self) -> Dict[str, Any]: + def to_human_readable(self) -> dict[str, Any]: """Returns dict of Input information, ready to be serialized - :rtype: Dict + :rtype: dict """ return { 'tx_id': self.tx_id.hex(), # string @@ -1264,7 +1264,7 @@ def __bytes__(self) -> bytes: return ret @classmethod - def create_from_bytes(cls, buf: bytes, *, verbose: VerboseCallback = None) -> Tuple['TxOutput', bytes]: + def create_from_bytes(cls, buf: bytes, *, verbose: VerboseCallback = None) -> tuple['TxOutput', bytes]: """ Creates a TxOutput from a serialized output. 
Returns the output and remaining bytes """ @@ -1305,7 +1305,7 @@ def can_melt_token(self) -> bool: """Whether this utxo can melt tokens""" return self.is_token_authority() and ((self.value & self.TOKEN_MELT_MASK) > 0) - def to_human_readable(self) -> Dict[str, Any]: + def to_human_readable(self) -> dict[str, Any]: """Checks what kind of script this is and returns it in human readable form """ from hathor.transaction.scripts import NanoContractMatchValues, parse_address_script @@ -1323,8 +1323,8 @@ def to_human_readable(self) -> Dict[str, Any]: return {} - def to_json(self, *, decode_script: bool = False) -> Dict[str, Any]: - data: Dict[str, Any] = {} + def to_json(self, *, decode_script: bool = False) -> dict[str, Any]: + data: dict[str, Any] = {} data['value'] = self.value data['token_data'] = self.token_data data['script'] = base64.b64encode(self.script).decode('utf-8') @@ -1333,7 +1333,7 @@ def to_json(self, *, decode_script: bool = False) -> Dict[str, Any]: return data -def bytes_to_output_value(buf: bytes) -> Tuple[int, bytes]: +def bytes_to_output_value(buf: bytes) -> tuple[int, bytes]: (value_high_byte,), _ = unpack('!b', buf) if value_high_byte < 0: output_struct = '!q' diff --git a/hathor/transaction/block.py b/hathor/transaction/block.py index e2531c9af..4a8ac0c5e 100644 --- a/hathor/transaction/block.py +++ b/hathor/transaction/block.py @@ -14,7 +14,7 @@ import base64 from struct import pack -from typing import TYPE_CHECKING, Any, Dict, List, Optional +from typing import TYPE_CHECKING, Any, Optional from hathor import daa from hathor.checkpoint import Checkpoint @@ -54,8 +54,8 @@ def __init__(self, signal_bits: int = 0, version: int = TxVersion.REGULAR_BLOCK, weight: float = 0, - outputs: Optional[List[TxOutput]] = None, - parents: Optional[List[bytes]] = None, + outputs: Optional[list[TxOutput]] = None, + parents: Optional[list[bytes]] = None, hash: Optional[bytes] = None, data: bytes = b'', storage: Optional['TransactionStorage'] = None) -> None: @@ 
-63,7 +63,7 @@ def __init__(self, outputs=outputs or [], parents=parents or [], hash=hash, storage=storage) self.data = data - def _get_formatted_fields_dict(self, short: bool = True) -> Dict[str, str]: + def _get_formatted_fields_dict(self, short: bool = True) -> dict[str, str]: d = super()._get_formatted_fields_dict(short) if not short: d.update(data=self.data.hex()) @@ -236,13 +236,13 @@ def get_token_uid(self, index: int) -> bytes: return settings.HATHOR_TOKEN_UID # TODO: maybe introduce convention on serialization methods names (e.g. to_json vs get_struct) - def to_json(self, decode_script: bool = False, include_metadata: bool = False) -> Dict[str, Any]: + def to_json(self, decode_script: bool = False, include_metadata: bool = False) -> dict[str, Any]: json = super().to_json(decode_script=decode_script, include_metadata=include_metadata) json['tokens'] = [] json['data'] = base64.b64encode(self.data).decode('utf-8') return json - def to_json_extended(self) -> Dict[str, Any]: + def to_json_extended(self) -> dict[str, Any]: json = super().to_json_extended() json['height'] = self.get_metadata().height @@ -264,7 +264,7 @@ def verify_basic(self, skip_block_weight_verification: bool = False) -> None: self.verify_weight() self.verify_reward() - def verify_checkpoint(self, checkpoints: List[Checkpoint]) -> None: + def verify_checkpoint(self, checkpoints: list[Checkpoint]) -> None: assert self.hash is not None assert self.storage is not None meta = self.get_metadata() diff --git a/hathor/transaction/genesis.py b/hathor/transaction/genesis.py index 851e4db91..408428523 100644 --- a/hathor/transaction/genesis.py +++ b/hathor/transaction/genesis.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import TYPE_CHECKING, List, Optional +from typing import TYPE_CHECKING, Optional from hathor.conf import HathorSettings from hathor.transaction import BaseTransaction, Block, Transaction, TxOutput @@ -64,7 +64,7 @@ def _get_genesis_hash() -> bytes: GENESIS_HASH = _get_genesis_hash() -def _get_genesis_transactions_unsafe(tx_storage: Optional['TransactionStorage']) -> List[BaseTransaction]: +def _get_genesis_transactions_unsafe(tx_storage: Optional['TransactionStorage']) -> list[BaseTransaction]: """You shouldn't get genesis directly. Please, get it from your storage instead.""" genesis = [] for tx in GENESIS: diff --git a/hathor/transaction/merge_mined_block.py b/hathor/transaction/merge_mined_block.py index e6fbd5669..121011a23 100644 --- a/hathor/transaction/merge_mined_block.py +++ b/hathor/transaction/merge_mined_block.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import TYPE_CHECKING, Any, Dict, List, Optional +from typing import TYPE_CHECKING, Any, Optional from hathor.transaction.aux_pow import BitcoinAuxPow from hathor.transaction.base_transaction import TxOutput, TxVersion @@ -30,8 +30,8 @@ def __init__(self, signal_bits: int = 0, version: int = TxVersion.MERGE_MINED_BLOCK, weight: float = 0, - outputs: Optional[List[TxOutput]] = None, - parents: Optional[List[bytes]] = None, + outputs: Optional[list[TxOutput]] = None, + parents: Optional[list[bytes]] = None, hash: Optional[bytes] = None, data: bytes = b'', aux_pow: Optional[BitcoinAuxPow] = None, @@ -40,7 +40,7 @@ def __init__(self, data=data, outputs=outputs or [], parents=parents or [], hash=hash, storage=storage) self.aux_pow = aux_pow - def _get_formatted_fields_dict(self, short: bool = True) -> Dict[str, str]: + def _get_formatted_fields_dict(self, short: bool = True) -> dict[str, str]: from hathor.util import abbrev d = super()._get_formatted_fields_dict(short) del d['nonce'] @@ -69,7 +69,7 @@ def 
get_struct_nonce(self) -> bytes: return dummy_bytes return bytes(self.aux_pow) - def to_json(self, decode_script: bool = False, include_metadata: bool = False) -> Dict[str, Any]: + def to_json(self, decode_script: bool = False, include_metadata: bool = False) -> dict[str, Any]: json = super().to_json(decode_script=decode_script, include_metadata=include_metadata) del json['nonce'] json['aux_pow'] = bytes(self.aux_pow).hex() if self.aux_pow else None diff --git a/hathor/transaction/resources/create_tx.py b/hathor/transaction/resources/create_tx.py index f389552e1..438d1f23d 100644 --- a/hathor/transaction/resources/create_tx.py +++ b/hathor/transaction/resources/create_tx.py @@ -13,7 +13,6 @@ # limitations under the License. import base64 -from typing import Dict, List from hathor.api_util import Resource, set_cors from hathor.cli.openapi_files.register import register_resource @@ -25,7 +24,7 @@ from hathor.util import api_catch_exceptions, json_dumpb, json_loadb -def from_raw_output(raw_output: Dict, tokens: List[bytes]) -> TxOutput: +def from_raw_output(raw_output: dict, tokens: list[bytes]) -> TxOutput: value = raw_output['value'] token_uid = raw_output.get('token_uid') if token_uid is not None: diff --git a/hathor/transaction/resources/push_tx.py b/hathor/transaction/resources/push_tx.py index ab98bac36..6c3dff6c5 100644 --- a/hathor/transaction/resources/push_tx.py +++ b/hathor/transaction/resources/push_tx.py @@ -14,7 +14,7 @@ import struct from json import JSONDecodeError -from typing import TYPE_CHECKING, Any, Dict, Optional, cast +from typing import TYPE_CHECKING, Any, Optional, cast from structlog import get_logger from twisted.web.http import Request @@ -67,7 +67,7 @@ def _get_client_ip(self, request: 'Request') -> str: addr = request.getClientAddress() return getattr(addr, 'host', 'unknown') - def handle_push_tx(self, params: Dict[str, Any], client_addr: str) -> Dict[str, Any]: + def handle_push_tx(self, params: dict[str, Any], client_addr: str) -> 
dict[str, Any]: try: tx_bytes = bytes.fromhex(params['hex_tx']) tx = tx_or_block_from_bytes(tx_bytes) @@ -154,8 +154,8 @@ def render_POST(self, request: Request) -> bytes: if not isinstance(data, dict): return error_ret - # Need to do that because json_loadb returns an object, which is not compatible with Dict[str, Any] - data = cast(Dict[str, Any], data) + # Need to do that because json_loadb returns an object, which is not compatible with dict[str, Any] + data = cast(dict[str, Any], data) if 'hex_tx' not in data: return error_ret diff --git a/hathor/transaction/resources/transaction.py b/hathor/transaction/resources/transaction.py index 148138c00..5763be4f8 100644 --- a/hathor/transaction/resources/transaction.py +++ b/hathor/transaction/resources/transaction.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Any, Dict +from typing import Any from hathor.api_util import ( Resource, @@ -34,7 +34,7 @@ GET_LIST_ARGS = ['count', 'type'] -def update_serialized_tokens_array(tx: BaseTransaction, serialized: Dict[str, Any]) -> None: +def update_serialized_tokens_array(tx: BaseTransaction, serialized: dict[str, Any]) -> None: """ A token creation tx to_json does not add its hash to the array of tokens We manually have to add it here to make it equal to the other transactions """ @@ -44,7 +44,7 @@ def update_serialized_tokens_array(tx: BaseTransaction, serialized: Dict[str, An serialized['tokens'] = [h.hex() for h in tx.tokens] -def get_tx_extra_data(tx: BaseTransaction) -> Dict[str, Any]: +def get_tx_extra_data(tx: BaseTransaction) -> dict[str, Any]: """ Get the data of a tx to be returned to the frontend Returns success, tx serializes, metadata and spent outputs """ @@ -78,7 +78,7 @@ def get_tx_extra_data(tx: BaseTransaction) -> Dict[str, Any]: break # Maps the token uid to the token_data value - token_uid_map: Dict[bytes, int] = {settings.HATHOR_TOKEN_UID: 0} + token_uid_map: 
dict[bytes, int] = {settings.HATHOR_TOKEN_UID: 0} # Sending also output information for each input inputs = [] diff --git a/hathor/transaction/resources/transaction_confirmation.py b/hathor/transaction/resources/transaction_confirmation.py index bc74f42d2..d60526491 100644 --- a/hathor/transaction/resources/transaction_confirmation.py +++ b/hathor/transaction/resources/transaction_confirmation.py @@ -13,7 +13,7 @@ # limitations under the License. from math import log -from typing import Any, Dict +from typing import Any from hathor.api_util import Resource, get_args, get_missing_params_msg, set_cors, validate_tx_hash from hathor.cli.openapi_files.register import register_resource @@ -32,7 +32,7 @@ def __init__(self, manager): # Important to have the manager so we can know the tx_storage self.manager = manager - def _render_GET_data(self, requested_hash: str) -> Dict[str, Any]: + def _render_GET_data(self, requested_hash: str) -> dict[str, Any]: success, message = validate_tx_hash(requested_hash, self.manager.tx_storage) if not success: return {'success': False, 'message': message} @@ -44,7 +44,7 @@ def _render_GET_data(self, requested_hash: str) -> Dict[str, Any]: return {'success': False, 'message': 'not allowed on blocks'} meta = tx.get_metadata() - data: Dict[str, Any] = {'success': True} + data: dict[str, Any] = {'success': True} if meta.first_block: block = self.manager.tx_storage.get_transaction(meta.first_block) diff --git a/hathor/transaction/scripts.py b/hathor/transaction/scripts.py index 87f18cbb0..ec2426a79 100644 --- a/hathor/transaction/scripts.py +++ b/hathor/transaction/scripts.py @@ -18,7 +18,7 @@ import struct from abc import ABC, abstractmethod from enum import IntEnum -from typing import Any, Callable, Dict, Generator, List, NamedTuple, Optional, Pattern, Type, Union +from typing import Any, Callable, Generator, NamedTuple, Optional, Pattern, Union from cryptography.exceptions import InvalidSignature from cryptography.hazmat.primitives import 
hashes @@ -53,11 +53,11 @@ # XXX: Because the Stack is a heterogeneous list of bytes and int, and some OPs only work for when the stack has some # or the other type, there are many places that require an assert to prevent the wrong type from being used, -# alternatives include: 1. only using `List[bytes]` and operations that work on `int` to build them from `bytes`, -# 2. using `bytearray` instead of `List[...]` and using type codes on the stack or at least value sizes on the +# alternatives include: 1. only using `list[bytes]` and operations that work on `int` to build them from `bytes`, +# 2. using `bytearray` instead of `list[...]` and using type codes on the stack or at least value sizes on the # stack and OPs should use the extra info accordingly 3. using some "in stack error" at least custom exceptions # for signaling that an OP was applied on a wrongly typed stack. -Stack = List[Union[bytes, int, str]] +Stack = list[Union[bytes, int, str]] class ScriptExtras(NamedTuple): @@ -228,7 +228,7 @@ class BaseScript(ABC): """ @abstractmethod - def to_human_readable(self) -> Dict[str, Any]: + def to_human_readable(self) -> dict[str, Any]: """Return a nice dict for using on informational json APIs.""" raise NotImplementedError @@ -278,8 +278,8 @@ def __init__(self, address: str, timelock: Optional[int] = None) -> None: self.address = address self.timelock = timelock - def to_human_readable(self) -> Dict[str, Any]: - ret: Dict[str, Any] = {} + def to_human_readable(self) -> dict[str, Any]: + ret: dict[str, Any] = {} ret['type'] = self.get_type() ret['address'] = self.address ret['timelock'] = self.timelock @@ -382,13 +382,13 @@ def __init__(self, address: str, timelock: Optional[Any] = None) -> None: self.address = address self.timelock = timelock - def to_human_readable(self) -> Dict[str, Any]: + def to_human_readable(self) -> dict[str, Any]: """ Decode MultiSig class to dict with its type and data - :return: Dict with MultiSig info - :rtype: Dict[str:] + :return: dict 
with MultiSig info + :rtype: dict[str, Any] """ - ret: Dict[str, Any] = {} + ret: dict[str, Any] = {} ret['type'] = self.get_type() ret['address'] = self.address ret['timelock'] = self.timelock @@ -447,13 +447,13 @@ def create_output_script(cls, address: bytes, timelock: Optional[Any] = None) -> return s.data @classmethod - def create_input_data(cls, redeem_script: bytes, signatures: List[bytes]) -> bytes: + def create_input_data(cls, redeem_script: bytes, signatures: list[bytes]) -> bytes: """ :param redeem_script: script to redeem the tokens: ... :type redeem_script: bytes :param signatures: array of signatures to validate the input and redeem the tokens - :type signagures: List[bytes] + :type signatures: list[bytes] :rtype: bytes """ @@ -542,7 +542,7 @@ def __init__(self, oracle_pubkey_hash, min_timestamp, oracle_data_id, value_dict :param value_dict: a dictionary with the pubKeyHash and corresponding value ({pubKeyHash, value}). The pubkeyHash with value matching the data sent by oracle will be able to spend the contract funds - :type value_dict: Dict[bytes, int] + :type value_dict: dict[bytes, int] :param fallback_pubkey_hash: if none of the values match, this pubkey hash identifies the winner address :type fallback_pubkey_hash: bytes @@ -550,11 +550,11 @@ self.oracle_pubkey_hash = oracle_pubkey_hash self.min_timestamp = min_timestamp self.oracle_data_id = oracle_data_id - self.value_dict = value_dict # Dict[bytes, int] + self.value_dict = value_dict # dict[bytes, int] self.fallback_pubkey_hash = fallback_pubkey_hash - def to_human_readable(self) -> Dict[str, Any]: - ret: Dict[str, Any] = {} + def to_human_readable(self) -> dict[str, Any]: + ret: dict[str, Any] = {} ret['type'] = 'NanoContractMatchValues' ret['oracle_pubkey_hash'] = base64.b64encode(self.oracle_pubkey_hash).decode('utf-8') ret['min_timestamp'] = self.min_timestamp @@ -731,7 +731,7 @@ def parse_address_script(script: 
bytes) -> Optional[Union[P2PKH, MultiSig]]: :return: P2PKH or MultiSig class or None :rtype: class or None """ - script_classes: List[Type[Union[P2PKH, MultiSig]]] = [P2PKH, MultiSig] + script_classes: list[type[Union[P2PKH, MultiSig]]] = [P2PKH, MultiSig] # Each class verifies its script for script_class in script_classes: if script_class.re_match.search(script): @@ -960,14 +960,14 @@ def get_sigops_count(data: bytes, output_script: Optional[bytes] = None) -> int: return count_sigops(data) -def execute_eval(data: bytes, log: List[str], extras: ScriptExtras) -> None: +def execute_eval(data: bytes, log: list[str], extras: ScriptExtras) -> None: """ Execute eval from data executing opcode methods :param data: data to be evaluated that contains data and opcodes :type data: bytes - :param log: List of log messages - :type log: List[str] + :param log: list of log messages + :type log: list[str] :param extras: namedtuple with extra fields :type extras: :py:class:`hathor.transaction.scripts.ScriptExtras` @@ -993,7 +993,7 @@ def execute_eval(data: bytes, log: List[str], extras: ScriptExtras) -> None: evaluate_final_stack(stack, log) -def evaluate_final_stack(stack: Stack, log: List[str]) -> None: +def evaluate_final_stack(stack: Stack, log: list[str]) -> None: """ Checks the final state of the stack. 
It's valid if only has 1 value on stack and that value is 1 (true) """ @@ -1027,7 +1027,7 @@ def script_eval(tx: Transaction, txin: TxInput, spent_tx: BaseTransaction) -> No """ input_data = txin.data output_script = spent_tx.outputs[txin.index].script - log: List[str] = [] + log: list[str] = [] extras = ScriptExtras(tx=tx, txin=txin, spent_tx=spent_tx) if MultiSig.re_match.search(output_script): @@ -1127,7 +1127,7 @@ def op_pushdata(position: int, full_data: bytes, stack: Stack) -> int: :type full_data: bytes :param stack: the stack used when evaluating the script - :type stack: List[] + :type stack: list[] :raises OutOfData: if data length to read is larger than what's available @@ -1150,7 +1150,7 @@ def op_pushdata1(position: int, full_data: bytes, stack: Stack) -> int: :type full_data: bytes :param stack: the stack used when evaluating the script - :type stack: List[] + :type stack: list[] :raises OutOfData: if data length to read is larger than what's available @@ -1162,11 +1162,11 @@ def op_pushdata1(position: int, full_data: bytes, stack: Stack) -> int: return new_pos -def op_dup(stack: Stack, log: List[str], extras: ScriptExtras) -> None: +def op_dup(stack: Stack, log: list[str], extras: ScriptExtras) -> None: """Duplicates item on top of stack :param stack: the stack used when evaluating the script - :type stack: List[] + :type stack: list[] :raises MissingStackItems: if there's no element on stack """ @@ -1175,13 +1175,13 @@ def op_dup(stack: Stack, log: List[str], extras: ScriptExtras) -> None: stack.append(stack[-1]) -def op_greaterthan_timestamp(stack: Stack, log: List[str], extras: ScriptExtras) -> None: +def op_greaterthan_timestamp(stack: Stack, log: list[str], extras: ScriptExtras) -> None: """Check whether transaction's timestamp is greater than the top of stack The top of stack must be a big-endian u32int. 
:param stack: the stack used when evaluating the script - :type stack: List[] + :type stack: list[] :raises MissingStackItems: if there's no element on stack """ @@ -1195,11 +1195,11 @@ def op_greaterthan_timestamp(stack: Stack, log: List[str], extras: ScriptExtras) datetime.datetime.fromtimestamp(timelock).strftime("%m/%d/%Y %I:%M:%S %p"))) -def op_equalverify(stack: Stack, log: List[str], extras: ScriptExtras) -> None: +def op_equalverify(stack: Stack, log: list[str], extras: ScriptExtras) -> None: """Verifies top 2 elements from stack are equal :param stack: the stack used when evaluating the script - :type stack: List[] + :type stack: list[] :raises MissingStackItems: if there aren't 2 element on stack :raises EqualVerifyFailed: items don't match @@ -1212,13 +1212,13 @@ def op_equalverify(stack: Stack, log: List[str], extras: ScriptExtras) -> None: raise EqualVerifyFailed('Failed to verify if elements are equal') -def op_equal(stack: Stack, log: List[str], extras: ScriptExtras) -> None: +def op_equal(stack: Stack, log: list[str], extras: ScriptExtras) -> None: """Verifies top 2 elements from stack are equal In case they are the same, we push 1 to the stack and push 0 if they are different :param stack: the stack used when evaluating the script - :type stack: List[] + :type stack: list[] """ if len(stack) < 2: raise MissingStackItems('OP_EQUAL: need 2 elements on stack, currently {}'.format(len(stack))) @@ -1233,12 +1233,12 @@ def op_equal(stack: Stack, log: List[str], extras: ScriptExtras) -> None: log.append('OP_EQUAL: failed. elements: {} {}'.format(elem1.hex(), elem2.hex())) -def op_checksig(stack: Stack, log: List[str], extras: ScriptExtras) -> None: +def op_checksig(stack: Stack, log: list[str], extras: ScriptExtras) -> None: """Verifies public key and signature match. Expects public key to be on top of stack, followed by signature. 
If they match, put 1 on stack (meaning True); otherwise, push 0 (False) :param stack: the stack used when evaluating the script - :type stack: List[] + :type stack: list[] :raises MissingStackItems: if there aren't 2 element on stack :raises ScriptError: if pubkey on stack is not a compressed public key @@ -1270,12 +1270,12 @@ def op_checksig(stack: Stack, log: List[str], extras: ScriptExtras) -> None: log.append('OP_CHECKSIG: failed') -def op_hash160(stack: Stack, log: List[str], extras: ScriptExtras) -> None: +def op_hash160(stack: Stack, log: list[str], extras: ScriptExtras) -> None: """Top stack item is hashed twice: first with SHA-256 and then with RIPEMD-160. Result is pushed back to stack. :param stack: the stack used when evaluating the script - :type stack: List[] + :type stack: list[] :raises MissingStackItems: if there's no element on stack """ @@ -1287,12 +1287,12 @@ def op_hash160(stack: Stack, log: List[str], extras: ScriptExtras) -> None: stack.append(new_elem) -def op_checkdatasig(stack: Stack, log: List[str], extras: ScriptExtras) -> None: +def op_checkdatasig(stack: Stack, log: list[str], extras: ScriptExtras) -> None: """Verifies public key, signature and data match. Expects public key to be on top of stack, followed by signature and data. If they match, put data on stack; otherwise, fail. :param stack: the stack used when evaluating the script - :type stack: List[] + :type stack: list[] :raises MissingStackItems: if there aren't 3 element on stack :raises OracleChecksigFailed: invalid signature, given data and public key @@ -1321,7 +1321,7 @@ def op_checkdatasig(stack: Stack, log: List[str], extras: ScriptExtras) -> None: raise OracleChecksigFailed from e -def op_data_strequal(stack: Stack, log: List[str], extras: ScriptExtras) -> None: +def op_data_strequal(stack: Stack, log: list[str], extras: ScriptExtras) -> None: """Equivalent to an OP_GET_DATA_STR followed by an OP_EQUALVERIFY. Consumes three parameters from stack: . 
Gets the kth value @@ -1329,7 +1329,7 @@ def op_data_strequal(stack: Stack, log: List[str], extras: ScriptExtras) -> None back on the stack. :param stack: the stack used when evaluating the script - :type stack: List[] + :type stack: list[] :raises MissingStackItems: if there aren't 3 element on stack :raises VerifyFailed: verification failed @@ -1352,14 +1352,14 @@ def op_data_strequal(stack: Stack, log: List[str], extras: ScriptExtras) -> None stack.append(data) -def op_data_greaterthan(stack: Stack, log: List[str], extras: ScriptExtras) -> None: +def op_data_greaterthan(stack: Stack, log: list[str], extras: ScriptExtras) -> None: """Equivalent to an OP_GET_DATA_INT followed by an OP_GREATERTHAN. Consumes three parameters from stack: . Gets the kth value from as an integer and verifies it's greater than . :param stack: the stack used when evaluating the script - :type stack: List[] + :type stack: list[] :raises MissingStackItems: if there aren't 3 element on stack :raises VerifyFailed: verification failed @@ -1388,11 +1388,11 @@ def op_data_greaterthan(stack: Stack, log: List[str], extras: ScriptExtras) -> N stack.append(data) -def op_data_match_interval(stack: Stack, log: List[str], extras: ScriptExtras) -> None: +def op_data_match_interval(stack: Stack, log: list[str], extras: ScriptExtras) -> None: """Equivalent to an OP_GET_DATA_INT followed by an OP_MATCH_INTERVAL. :param stack: the stack used when evaluating the script - :type stack: List[] + :type stack: list[] :raises MissingStackItems: if there aren't 3 element on stack :raises VerifyFailed: verification failed @@ -1440,11 +1440,11 @@ def op_data_match_interval(stack: Stack, log: List[str], extras: ScriptExtras) - stack.append(last_pubkey) -def op_data_match_value(stack: Stack, log: List[str], extras: ScriptExtras) -> None: +def op_data_match_value(stack: Stack, log: list[str], extras: ScriptExtras) -> None: """Equivalent to an OP_GET_DATA_STR followed by an OP_MATCH_VALUE. 
:param stack: the stack used when evaluating the script - :type stack: List[] + :type stack: list[] :raises MissingStackItems: if there aren't 3 element on stack :raises VerifyFailed: verification failed @@ -1489,12 +1489,12 @@ def op_data_match_value(stack: Stack, log: List[str], extras: ScriptExtras) -> N stack.append(winner_pubkey) -def op_find_p2pkh(stack: Stack, log: List[str], extras: ScriptExtras) -> None: +def op_find_p2pkh(stack: Stack, log: list[str], extras: ScriptExtras) -> None: """Checks whether the current transaction has an output with a P2PKH script with the given public key hash and the same amount as the input. :param stack: the stack used when evaluating the script - :type stack: List[] + :type stack: list[] :param tx: Transaction to be added :type tx: :py:class:`hathor.transaction.BaseTransaction` @@ -1525,11 +1525,11 @@ def op_find_p2pkh(stack: Stack, log: List[str], extras: ScriptExtras) -> None: raise VerifyFailed -def op_checkmultisig(stack: Stack, log: List[str], extras: ScriptExtras) -> None: +def op_checkmultisig(stack: Stack, log: list[str], extras: ScriptExtras) -> None: """Checks if it has the minimum signatures required and if all of them are valid :param stack: the stack used when evaluating the script - :type stack: List[] + :type stack: list[] :raises MissingStackItems: if stack is empty or it has less signatures than the minimum required :raises VerifyFailed: verification failed @@ -1606,7 +1606,7 @@ def op_checkmultisig(stack: Stack, log: List[str], extras: ScriptExtras) -> None stack.append(1) -def op_integer(opcode: int, stack: Stack, log: List[str], extras: ScriptExtras) -> None: +def op_integer(opcode: int, stack: Stack, log: list[str], extras: ScriptExtras) -> None: """ Appends an integer to the stack We get the opcode comparing to all integers opcodes @@ -1618,7 +1618,7 @@ def op_integer(opcode: int, stack: Stack, log: List[str], extras: ScriptExtras) :type opcode: bytes :param stack: the stack used when evaluating the 
script - :type stack: List[] + :type stack: list[] """ try: stack.append(decode_opn(opcode)) @@ -1626,7 +1626,7 @@ def op_integer(opcode: int, stack: Stack, log: List[str], extras: ScriptExtras) raise ScriptError(e) from e -MAP_OPCODE_TO_FN: Dict[int, Callable[[Stack, List[str], ScriptExtras], None]] = { +MAP_OPCODE_TO_FN: dict[int, Callable[[Stack, list[str], ScriptExtras], None]] = { Opcode.OP_DUP: op_dup, Opcode.OP_EQUAL: op_equal, Opcode.OP_EQUALVERIFY: op_equalverify, diff --git a/hathor/transaction/storage/memory_storage.py b/hathor/transaction/storage/memory_storage.py index e4cd2cf7e..3f6660147 100644 --- a/hathor/transaction/storage/memory_storage.py +++ b/hathor/transaction/storage/memory_storage.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Any, Dict, Iterator, Optional, TypeVar +from typing import Any, Iterator, Optional, TypeVar from hathor.transaction.storage.exceptions import TransactionDoesNotExist from hathor.transaction.storage.migrations import MigrationState @@ -30,10 +30,10 @@ def __init__(self, with_index: bool = True, *, _clone_if_needed: bool = False) - transaction/blocks/metadata when returning those objects. 
:type _clone_if_needed: bool """ - self.transactions: Dict[bytes, BaseTransaction] = {} - self.metadata: Dict[bytes, TransactionMetadata] = {} + self.transactions: dict[bytes, BaseTransaction] = {} + self.metadata: dict[bytes, TransactionMetadata] = {} # Store custom key/value attributes - self.attributes: Dict[str, Any] = {} + self.attributes: dict[str, Any] = {} self._clone_if_needed = _clone_if_needed super().__init__(with_index=with_index) diff --git a/hathor/transaction/storage/rocksdb_storage.py b/hathor/transaction/storage/rocksdb_storage.py index 5daa51815..003e6097f 100644 --- a/hathor/transaction/storage/rocksdb_storage.py +++ b/hathor/transaction/storage/rocksdb_storage.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import TYPE_CHECKING, Dict, Iterator, List, Optional +from typing import TYPE_CHECKING, Iterator, Optional from structlog import get_logger @@ -182,8 +182,8 @@ def is_empty(self) -> bool: def get_sst_files_sizes_by_cf( self, - cfs: Optional[List['rocksdb.ColumnFamilyHandle']] = None - ) -> Dict[bytes, float]: + cfs: Optional[list['rocksdb.ColumnFamilyHandle']] = None + ) -> dict[bytes, float]: """Get the SST files sizes of each Column Family in bytes :param cfs: The list of column families, defaults to None, in which case all of them are returned @@ -191,7 +191,7 @@ def get_sst_files_sizes_by_cf( """ column_families = self._db.column_families if cfs is None else cfs - sizes: Dict[bytes, float] = {} + sizes: dict[bytes, float] = {} for cf in column_families: sizes[cf.name] = float(self._db.get_property(b'rocksdb.total-sst-files-size', cf)) diff --git a/hathor/transaction/storage/transaction_storage.py b/hathor/transaction/storage/transaction_storage.py index 8970cc8ee..ead75d7ad 100644 --- a/hathor/transaction/storage/transaction_storage.py +++ b/hathor/transaction/storage/transaction_storage.py @@ -17,7 +17,7 @@ from collections import defaultdict, deque 
from contextlib import AbstractContextManager from threading import Lock -from typing import Any, Dict, Iterator, List, NamedTuple, Optional, Set, Tuple, Type, cast +from typing import Any, Iterator, NamedTuple, Optional, cast from weakref import WeakValueDictionary from intervaltree.interval import Interval @@ -53,9 +53,9 @@ class AllTipsCache(NamedTuple): timestamp: int - tips: Set[Interval] + tips: set[Interval] merkle_tree: bytes - hashes: List[bytes] + hashes: list[bytes] class TransactionStorage(ABC): @@ -79,11 +79,11 @@ class TransactionStorage(ABC): _last_start_attribute: str = 'last_start' # history of migrations that have to be applied in the order defined here - _migration_factories: List[Type[BaseMigration]] = [ + _migration_factories: list[type[BaseMigration]] = [ add_min_height_metadata.Migration, ] - _migrations: List[BaseMigration] + _migrations: list[BaseMigration] def __init__(self) -> None: # Weakref is used to guarantee that there is only one instance of each transaction in memory. @@ -105,7 +105,7 @@ def __init__(self) -> None: # Cache for the best block tips # This cache is updated in the consensus algorithm. - self._best_block_tips_cache: Optional[List[bytes]] = None + self._best_block_tips_cache: Optional[list[bytes]] = None # If should create lock when getting a transaction self._should_lock = False @@ -119,7 +119,7 @@ def __init__(self) -> None: self._all_tips_cache: Optional[AllTipsCache] = None # Initialize cache for genesis transactions. 
- self._genesis_cache: Dict[bytes, BaseTransaction] = {} + self._genesis_cache: dict[bytes, BaseTransaction] = {} # Internal toggle to choose when to select topological DFS iterator, used only on some tests self._always_use_topological_dfs = False @@ -147,7 +147,7 @@ def is_empty(self) -> bool: """True when only genesis is present, useful for checking for a fresh database.""" raise NotImplementedError - def update_best_block_tips_cache(self, tips_cache: Optional[List[bytes]]) -> None: + def update_best_block_tips_cache(self, tips_cache: Optional[list[bytes]]) -> None: # XXX: check that the cache update is working properly, only used in unittests # XXX: this might not actually hold true in some cases, commenting out while we figure it out # if settings.SLOW_ASSERTS: @@ -307,7 +307,7 @@ def _save_or_verify_genesis(self) -> None: self._genesis_cache[tx2.hash] = tx2 self._saving_genesis = False - def _get_genesis_from_settings(self) -> List[BaseTransaction]: + def _get_genesis_from_settings(self) -> list[BaseTransaction]: """Return all genesis from settings.""" from hathor.transaction.genesis import _get_genesis_transactions_unsafe return _get_genesis_transactions_unsafe(self) @@ -446,7 +446,7 @@ def remove_transaction(self, tx: BaseTransaction) -> None: if self.indexes is not None: self.del_from_indexes(tx, remove_all=True, relax_assert=True) - def remove_transactions(self, txs: List[BaseTransaction]) -> None: + def remove_transactions(self, txs: list[BaseTransaction]) -> None: """Will remove all of the transactions on the list from the database. 
Special notes: @@ -458,8 +458,8 @@ def remove_transactions(self, txs: List[BaseTransaction]) -> None: - parent's children metadata will be updated to reflect the removals - all indexes will be updated """ - parents_to_update: Dict[bytes, List[bytes]] = defaultdict(list) - dangling_children: Set[bytes] = set() + parents_to_update: dict[bytes, list[bytes]] = defaultdict(list) + dangling_children: set[bytes] = set() txset = {not_none(tx.hash) for tx in txs} for tx in txs: assert tx.hash is not None @@ -594,7 +594,7 @@ def first_timestamp(self) -> int: raise NotImplementedError @abstractmethod - def get_best_block_tips(self, timestamp: Optional[float] = None, *, skip_cache: bool = False) -> List[bytes]: + def get_best_block_tips(self, timestamp: Optional[float] = None, *, skip_cache: bool = False) -> list[bytes]: """ Return a list of blocks that are heads in a best chain. It must be used when mining. When more than one block is returned, it means that there are multiple best chains and @@ -604,7 +604,7 @@ def get_best_block_tips(self, timestamp: Optional[float] = None, *, skip_cache: return self._best_block_tips_cache[:] best_score = 0.0 - best_tip_blocks: List[bytes] = [] + best_tip_blocks: list[bytes] = [] for block_hash in (x.data for x in self.get_block_tips(timestamp)): meta = self.get_metadata(block_hash) @@ -643,10 +643,10 @@ def get_height_best_block(self) -> int: return highest_height @cpu.profiler('get_merkle_tree') - def get_merkle_tree(self, timestamp: int) -> Tuple[bytes, List[bytes]]: + def get_merkle_tree(self, timestamp: int) -> tuple[bytes, list[bytes]]: """ Generate a hash to check whether the DAG is the same at that timestamp. 
- :rtype: Tuple[bytes(hash), List[bytes(hash)]] + :rtype: tuple[bytes(hash), list[bytes(hash)]] """ if self._all_tips_cache is not None and timestamp >= self._all_tips_cache.timestamp: return self._all_tips_cache.merkle_tree, self._all_tips_cache.hashes @@ -659,10 +659,10 @@ def get_merkle_tree(self, timestamp: int) -> Tuple[bytes, List[bytes]]: return self.calculate_merkle_tree(intervals) - def calculate_merkle_tree(self, intervals: Set[Interval]) -> Tuple[bytes, List[bytes]]: + def calculate_merkle_tree(self, intervals: set[Interval]) -> tuple[bytes, list[bytes]]: """ Generate a hash of the transactions at the intervals - :rtype: Tuple[bytes(hash), List[bytes(hash)]] + :rtype: tuple[bytes(hash), list[bytes(hash)]] """ hashes = [x.data for x in intervals] hashes.sort() @@ -674,19 +674,19 @@ def calculate_merkle_tree(self, intervals: Set[Interval]) -> Tuple[bytes, List[b return merkle.digest(), hashes @abstractmethod - def get_block_tips(self, timestamp: Optional[float] = None) -> Set[Interval]: + def get_block_tips(self, timestamp: Optional[float] = None) -> set[Interval]: raise NotImplementedError @abstractmethod - def get_all_tips(self, timestamp: Optional[float] = None) -> Set[Interval]: + def get_all_tips(self, timestamp: Optional[float] = None) -> set[Interval]: raise NotImplementedError @abstractmethod - def get_tx_tips(self, timestamp: Optional[float] = None) -> Set[Interval]: + def get_tx_tips(self, timestamp: Optional[float] = None) -> set[Interval]: raise NotImplementedError @abstractmethod - def get_newest_blocks(self, count: int) -> Tuple[List[Block], bool]: + def get_newest_blocks(self, count: int) -> tuple[list[Block], bool]: """ Get blocks from the newest to the oldest :param count: Number of blocks to be returned @@ -695,7 +695,7 @@ def get_newest_blocks(self, count: int) -> Tuple[List[Block], bool]: raise NotImplementedError @abstractmethod - def get_newest_txs(self, count: int) -> Tuple[List[BaseTransaction], bool]: + def get_newest_txs(self, 
count: int) -> tuple[list[BaseTransaction], bool]: """ Get transactions from the newest to the oldest :param count: Number of transactions to be returned @@ -705,7 +705,7 @@ def get_newest_txs(self, count: int) -> Tuple[List[BaseTransaction], bool]: @abstractmethod def get_older_blocks_after(self, timestamp: int, hash_bytes: bytes, - count: int) -> Tuple[List[Block], bool]: + count: int) -> tuple[list[Block], bool]: """ Get blocks from the timestamp/hash_bytes reference to the oldest :param timestamp: Timestamp reference to start the search @@ -717,7 +717,7 @@ def get_older_blocks_after(self, timestamp: int, hash_bytes: bytes, @abstractmethod def get_newer_blocks_after(self, timestamp: int, hash_bytes: bytes, - count: int) -> Tuple[List[BaseTransaction], bool]: + count: int) -> tuple[list[BaseTransaction], bool]: """ Get blocks from the timestamp/hash_bytes reference to the newest :param timestamp: Timestamp reference to start the search @@ -728,7 +728,7 @@ def get_newer_blocks_after(self, timestamp: int, hash_bytes: bytes, raise NotImplementedError @abstractmethod - def get_older_txs_after(self, timestamp: int, hash_bytes: bytes, count: int) -> Tuple[List[BaseTransaction], bool]: + def get_older_txs_after(self, timestamp: int, hash_bytes: bytes, count: int) -> tuple[list[BaseTransaction], bool]: """ Get transactions from the timestamp/hash_bytes reference to the oldest :param timestamp: Timestamp reference to start the search @@ -739,7 +739,7 @@ def get_older_txs_after(self, timestamp: int, hash_bytes: bytes, count: int) -> raise NotImplementedError @abstractmethod - def get_newer_txs_after(self, timestamp: int, hash_bytes: bytes, count: int) -> Tuple[List[BaseTransaction], bool]: + def get_newer_txs_after(self, timestamp: int, hash_bytes: bytes, count: int) -> tuple[list[BaseTransaction], bool]: """ Get transactions from the timestamp/hash_bytes reference to the newest :param timestamp: Timestamp reference to start the search @@ -849,11 +849,11 @@ def 
get_genesis(self, hash_bytes: bytes) -> Optional[BaseTransaction]: raise NotImplementedError @abstractmethod - def get_all_genesis(self) -> Set[BaseTransaction]: + def get_all_genesis(self) -> set[BaseTransaction]: raise NotImplementedError @abstractmethod - def get_transactions_before(self, hash_bytes: bytes, num_blocks: int = 100) -> List[BaseTransaction]: + def get_transactions_before(self, hash_bytes: bytes, num_blocks: int = 100) -> list[BaseTransaction]: """Run a BFS starting from the giving `hash_bytes`. :param hash_bytes: Starting point of the BFS, either a block or a transaction. @@ -863,7 +863,7 @@ def get_transactions_before(self, hash_bytes: bytes, num_blocks: int = 100) -> L raise NotImplementedError @abstractmethod - def get_blocks_before(self, hash_bytes: bytes, num_blocks: int = 100) -> List[Block]: + def get_blocks_before(self, hash_bytes: bytes, num_blocks: int = 100) -> list[Block]: """Run a BFS starting from the giving `hash_bytes`. :param hash_bytes: Starting point of the BFS. @@ -1022,11 +1022,11 @@ def iter_mempool_from_best_index(self) -> Iterator[Transaction]: else: yield from self.iter_mempool_from_tx_tips() - def compute_transactions_that_became_invalid(self) -> List[BaseTransaction]: + def compute_transactions_that_became_invalid(self) -> list[BaseTransaction]: """ This method will look for transactions in the mempool that have became invalid due to the reward lock. 
""" from hathor.transaction.validation_state import ValidationState - to_remove: List[BaseTransaction] = [] + to_remove: list[BaseTransaction] = [] for tx in self.iter_mempool_from_best_index(): if tx.is_spent_reward_locked(): tx.set_validation(ValidationState.INVALID) @@ -1080,13 +1080,13 @@ def remove_cache(self) -> None: """Remove all caches in case we don't need it.""" self.indexes = None - def get_best_block_tips(self, timestamp: Optional[float] = None, *, skip_cache: bool = False) -> List[bytes]: + def get_best_block_tips(self, timestamp: Optional[float] = None, *, skip_cache: bool = False) -> list[bytes]: return super().get_best_block_tips(timestamp, skip_cache=skip_cache) def get_weight_best_block(self) -> float: return super().get_weight_best_block() - def get_block_tips(self, timestamp: Optional[float] = None) -> Set[Interval]: + def get_block_tips(self, timestamp: Optional[float] = None) -> set[Interval]: if self.indexes is None: raise NotImplementedError assert self.indexes is not None @@ -1094,7 +1094,7 @@ def get_block_tips(self, timestamp: Optional[float] = None) -> Set[Interval]: timestamp = self.latest_timestamp return self.indexes.block_tips[timestamp] - def get_tx_tips(self, timestamp: Optional[float] = None) -> Set[Interval]: + def get_tx_tips(self, timestamp: Optional[float] = None) -> set[Interval]: if self.indexes is None: raise NotImplementedError assert self.indexes is not None @@ -1111,7 +1111,7 @@ def get_tx_tips(self, timestamp: Optional[float] = None) -> Set[Interval]: return tips - def get_all_tips(self, timestamp: Optional[float] = None) -> Set[Interval]: + def get_all_tips(self, timestamp: Optional[float] = None) -> set[Interval]: if self.indexes is None: raise NotImplementedError assert self.indexes is not None @@ -1129,7 +1129,7 @@ def get_all_tips(self, timestamp: Optional[float] = None) -> Set[Interval]: return tips - def get_newest_blocks(self, count: int) -> Tuple[List[Block], bool]: + def get_newest_blocks(self, count: int) -> 
tuple[list[Block], bool]: if self.indexes is None: raise NotImplementedError assert self.indexes is not None @@ -1137,7 +1137,7 @@ def get_newest_blocks(self, count: int) -> Tuple[List[Block], bool]: blocks = [cast(Block, self.get_transaction(block_hash)) for block_hash in block_hashes] return blocks, has_more - def get_newest_txs(self, count: int) -> Tuple[List[BaseTransaction], bool]: + def get_newest_txs(self, count: int) -> tuple[list[BaseTransaction], bool]: if self.indexes is None: raise NotImplementedError assert self.indexes is not None @@ -1145,7 +1145,7 @@ def get_newest_txs(self, count: int) -> Tuple[List[BaseTransaction], bool]: txs = [self.get_transaction(tx_hash) for tx_hash in tx_hashes] return txs, has_more - def get_older_blocks_after(self, timestamp: int, hash_bytes: bytes, count: int) -> Tuple[List[Block], bool]: + def get_older_blocks_after(self, timestamp: int, hash_bytes: bytes, count: int) -> tuple[list[Block], bool]: if self.indexes is None: raise NotImplementedError assert self.indexes is not None @@ -1154,7 +1154,7 @@ def get_older_blocks_after(self, timestamp: int, hash_bytes: bytes, count: int) return blocks, has_more def get_newer_blocks_after(self, timestamp: int, hash_bytes: bytes, - count: int) -> Tuple[List[BaseTransaction], bool]: + count: int) -> tuple[list[BaseTransaction], bool]: if self.indexes is None: raise NotImplementedError assert self.indexes is not None @@ -1162,7 +1162,7 @@ def get_newer_blocks_after(self, timestamp: int, hash_bytes: bytes, blocks = [self.get_transaction(block_hash) for block_hash in block_hashes] return blocks, has_more - def get_older_txs_after(self, timestamp: int, hash_bytes: bytes, count: int) -> Tuple[List[BaseTransaction], bool]: + def get_older_txs_after(self, timestamp: int, hash_bytes: bytes, count: int) -> tuple[list[BaseTransaction], bool]: if self.indexes is None: raise NotImplementedError assert self.indexes is not None @@ -1170,7 +1170,7 @@ def get_older_txs_after(self, timestamp: int, 
hash_bytes: bytes, count: int) -> txs = [self.get_transaction(tx_hash) for tx_hash in tx_hashes] return txs, has_more - def get_newer_txs_after(self, timestamp: int, hash_bytes: bytes, count: int) -> Tuple[List[BaseTransaction], bool]: + def get_newer_txs_after(self, timestamp: int, hash_bytes: bytes, count: int) -> tuple[list[BaseTransaction], bool]: if self.indexes is None: raise NotImplementedError assert self.indexes is not None @@ -1189,8 +1189,8 @@ def _topological_sort_timestamp_index(self) -> Iterator[BaseTransaction]: assert self.indexes is not None cur_timestamp: Optional[int] = None - cur_blocks: List[Block] = [] - cur_txs: List[Transaction] = [] + cur_blocks: list[Block] = [] + cur_txs: list[Transaction] = [] for tx_hash in self.indexes.sorted_all.iter(): tx = self.get_transaction(tx_hash) if tx.timestamp != cur_timestamp: @@ -1225,8 +1225,8 @@ def __init__(self, tx: BaseTransaction): self.is_transaction = tx.is_transaction self.tx = tx - to_visit: List[Item] = list(map(Item, self.get_all_genesis())) - seen: Set[bytes] = set() + to_visit: list[Item] = list(map(Item, self.get_all_genesis())) + seen: set[bytes] = set() heapq.heapify(to_visit) while to_visit: item = heapq.heappop(to_visit) @@ -1249,7 +1249,7 @@ def _topological_sort_dfs(self) -> Iterator[BaseTransaction]: # Sorting the vertices by the lengths of their longest incoming paths produces a topological # ordering (Dekel, Nassimi & Sahni 1981). 
See: https://epubs.siam.org/doi/10.1137/0210049 # See also: https://gitlab.com/HathorNetwork/hathor-python/merge_requests/31 - visited: Dict[bytes, int] = dict() # Dict[bytes, int] + visited: dict[bytes, int] = dict() # dict[bytes, int] for tx in self.get_all_transactions(): if not tx.is_block: continue @@ -1257,7 +1257,7 @@ def _topological_sort_dfs(self) -> Iterator[BaseTransaction]: for tx in self.get_all_transactions(): yield from self._run_topological_sort_dfs(tx, visited) - def _run_topological_sort_dfs(self, root: BaseTransaction, visited: Dict[bytes, int]) -> Iterator[BaseTransaction]: + def _run_topological_sort_dfs(self, root: BaseTransaction, visited: dict[bytes, int]) -> Iterator[BaseTransaction]: if root.hash in visited: return @@ -1338,25 +1338,25 @@ def get_genesis(self, hash_bytes: bytes) -> Optional[BaseTransaction]: assert self._genesis_cache is not None return self._genesis_cache.get(hash_bytes, None) - def get_all_genesis(self) -> Set[BaseTransaction]: + def get_all_genesis(self) -> set[BaseTransaction]: assert self._genesis_cache is not None return set(self._genesis_cache.values()) def get_transactions_before(self, hash_bytes: bytes, - num_blocks: int = 100) -> List[BaseTransaction]: # pragma: no cover + num_blocks: int = 100) -> list[BaseTransaction]: # pragma: no cover ref_tx = self.get_transaction(hash_bytes) - visited: Dict[bytes, int] = dict() # Dict[bytes, int] + visited: dict[bytes, int] = dict() # dict[bytes, int] result = [x for x in self._run_topological_sort_dfs(ref_tx, visited) if not x.is_block] result = result[-num_blocks:] return result - def get_blocks_before(self, hash_bytes: bytes, num_blocks: int = 100) -> List[Block]: + def get_blocks_before(self, hash_bytes: bytes, num_blocks: int = 100) -> list[Block]: ref_tx = self.get_transaction(hash_bytes) if not ref_tx.is_block: raise TransactionIsNotABlock - result = [] # List[Block] - pending_visits = deque(ref_tx.parents) # List[bytes] - used = set(pending_visits) # Set[bytes] + 
result = [] # list[Block] + pending_visits = deque(ref_tx.parents) # list[bytes] + used = set(pending_visits) # set[bytes] while pending_visits: tx_hash = pending_visits.popleft() tx = self.get_transaction(tx_hash) diff --git a/hathor/transaction/storage/traversal.py b/hathor/transaction/storage/traversal.py index 4f541599e..aef55069e 100644 --- a/hathor/transaction/storage/traversal.py +++ b/hathor/transaction/storage/traversal.py @@ -16,7 +16,7 @@ import heapq from abc import ABC, abstractmethod from itertools import chain -from typing import TYPE_CHECKING, Any, Iterable, Iterator, List, Optional, Set, Union +from typing import TYPE_CHECKING, Any, Iterable, Iterator, Optional, Union if TYPE_CHECKING: from hathor.transaction import BaseTransaction # noqa: F401 @@ -43,8 +43,8 @@ def __le__(self, other: 'HeapItem') -> bool: class GenericWalk(ABC): """ A helper class to walk on the DAG. """ - seen: Set[bytes] - to_visit: List[Any] + seen: set[bytes] + to_visit: list[Any] def __init__(self, storage: 'TransactionStorage', *, is_dag_funds: bool = False, is_dag_verifications: bool = False, is_left_to_right: bool = True): @@ -145,7 +145,7 @@ def run(self, root: Union['BaseTransaction', Iterable['BaseTransaction']], *, class BFSWalk(GenericWalk): """ A help to walk in the DAG using a BFS. """ - to_visit: List[HeapItem] + to_visit: list[HeapItem] def _push_visit(self, tx: 'BaseTransaction') -> None: heapq.heappush(self.to_visit, HeapItem(tx, reverse=self._reverse_heap)) @@ -163,7 +163,7 @@ def _pop_visit(self) -> 'BaseTransaction': class DFSWalk(GenericWalk): """ A help to walk in the DAG using a DFS. 
""" - to_visit: List['BaseTransaction'] + to_visit: list['BaseTransaction'] def _push_visit(self, tx: 'BaseTransaction') -> None: self.to_visit.append(tx) diff --git a/hathor/transaction/token_creation_tx.py b/hathor/transaction/token_creation_tx.py index beafa4cab..c70ca761c 100644 --- a/hathor/transaction/token_creation_tx.py +++ b/hathor/transaction/token_creation_tx.py @@ -13,7 +13,7 @@ # limitations under the License. from struct import error as StructError, pack -from typing import Any, Dict, List, Optional, Tuple +from typing import Any, Optional from hathor.conf import HathorSettings from hathor.transaction.base_transaction import TxInput, TxOutput, TxVersion @@ -42,9 +42,9 @@ def __init__(self, signal_bits: int = 0, version: int = TxVersion.TOKEN_CREATION_TRANSACTION, weight: float = 0, - inputs: Optional[List[TxInput]] = None, - outputs: Optional[List[TxOutput]] = None, - parents: Optional[List[bytes]] = None, + inputs: Optional[list[TxInput]] = None, + outputs: Optional[list[TxOutput]] = None, + parents: Optional[list[bytes]] = None, hash: Optional[bytes] = None, token_name: str = '', token_symbol: str = '', @@ -181,7 +181,7 @@ def serialize_token_info(self) -> bytes: return ret @classmethod - def deserialize_token_info(cls, buf: bytes, *, verbose: VerboseCallback = None) -> Tuple[str, str, bytes]: + def deserialize_token_info(cls, buf: bytes, *, verbose: VerboseCallback = None) -> tuple[str, str, bytes]: """ Gets the token name and symbol from serialized format """ (token_info_version,), buf = unpack('!B', buf) @@ -209,14 +209,14 @@ def deserialize_token_info(cls, buf: bytes, *, verbose: VerboseCallback = None) return decoded_name, decoded_symbol, buf - def to_json(self, decode_script: bool = False, include_metadata: bool = False) -> Dict[str, Any]: + def to_json(self, decode_script: bool = False, include_metadata: bool = False) -> dict[str, Any]: json = super().to_json(decode_script=decode_script, include_metadata=include_metadata) json['token_name'] = 
self.token_name json['token_symbol'] = self.token_symbol json['tokens'] = [] return json - def to_json_extended(self) -> Dict[str, Any]: + def to_json_extended(self) -> dict[str, Any]: json = super().to_json_extended() json['token_name'] = self.token_name json['token_symbol'] = self.token_symbol diff --git a/hathor/transaction/transaction.py b/hathor/transaction/transaction.py index eb38ab005..e5de207c7 100644 --- a/hathor/transaction/transaction.py +++ b/hathor/transaction/transaction.py @@ -15,7 +15,7 @@ import hashlib from itertools import chain from struct import pack -from typing import TYPE_CHECKING, Any, Dict, Iterator, List, NamedTuple, Optional, Set, Tuple +from typing import TYPE_CHECKING, Any, Iterator, NamedTuple, Optional from hathor import daa from hathor.checkpoint import Checkpoint @@ -78,10 +78,10 @@ def __init__(self, signal_bits: int = 0, version: int = TxVersion.REGULAR_TRANSACTION, weight: float = 0, - inputs: Optional[List[TxInput]] = None, - outputs: Optional[List[TxOutput]] = None, - parents: Optional[List[VertexId]] = None, - tokens: Optional[List[TokenUid]] = None, + inputs: Optional[list[TxInput]] = None, + outputs: Optional[list[TxOutput]] = None, + parents: Optional[list[VertexId]] = None, + tokens: Optional[list[TokenUid]] = None, hash: Optional[VertexId] = None, storage: Optional['TransactionStorage'] = None) -> None: """ @@ -280,7 +280,7 @@ def get_token_uid(self, index: int) -> TokenUid: return settings.HATHOR_TOKEN_UID return self.tokens[index - 1] - def to_json(self, decode_script: bool = False, include_metadata: bool = False) -> Dict[str, Any]: + def to_json(self, decode_script: bool = False, include_metadata: bool = False) -> dict[str, Any]: json = super().to_json(decode_script=decode_script, include_metadata=include_metadata) json['tokens'] = [h.hex() for h in self.tokens] return json @@ -294,7 +294,7 @@ def verify_basic(self, skip_block_weight_verification: bool = False) -> None: self.verify_weight() 
self.verify_without_storage() - def verify_checkpoint(self, checkpoints: List[Checkpoint]) -> None: + def verify_checkpoint(self, checkpoints: list[Checkpoint]) -> None: assert self.storage is not None if self.is_genesis: return @@ -413,10 +413,10 @@ def verify_outputs(self) -> None: if output.get_token_index() > len(self.tokens): raise InvalidToken('token uid index not available: index {}'.format(output.get_token_index())) - def get_token_info_from_inputs(self) -> Dict[TokenUid, TokenInfo]: + def get_token_info_from_inputs(self) -> dict[TokenUid, TokenInfo]: """Sum up all tokens present in the inputs and their properties (amount, can_mint, can_melt) """ - token_dict: Dict[TokenUid, TokenInfo] = {} + token_dict: dict[TokenUid, TokenInfo] = {} default_info: TokenInfo = TokenInfo(0, False, False) @@ -440,7 +440,7 @@ def get_token_info_from_inputs(self) -> Dict[TokenUid, TokenInfo]: return token_dict - def update_token_info_from_outputs(self, token_dict: Dict[TokenUid, TokenInfo]) -> None: + def update_token_info_from_outputs(self, token_dict: dict[TokenUid, TokenInfo]) -> None: """Iterate over the outputs and add values to token info dict. Updates the dict in-place. Also, checks if no token has authorities on the outputs not present on the inputs @@ -471,7 +471,7 @@ def update_token_info_from_outputs(self, token_dict: Dict[TokenUid, TokenInfo]) sum_tokens = token_info.amount + tx_output.value token_dict[token_uid] = TokenInfo(sum_tokens, token_info.can_mint, token_info.can_melt) - def check_authorities_and_deposit(self, token_dict: Dict[TokenUid, TokenInfo]) -> None: + def check_authorities_and_deposit(self, token_dict: dict[TokenUid, TokenInfo]) -> None: """Verify that the sum of outputs is equal of the sum of inputs, for each token. If sum of inputs and outputs is not 0, make sure inputs have mint/melt authority. 
@@ -538,7 +538,7 @@ def verify_inputs(self, *, skip_script: bool = False) -> None: """Verify inputs signatures and ownership and all inputs actually exist""" from hathor.transaction.storage.exceptions import TransactionDoesNotExist - spent_outputs: Set[Tuple[VertexId, int]] = set() + spent_outputs: set[tuple[VertexId, int]] = set() for input_tx in self.inputs: if len(input_tx.data) > settings.MAX_INPUT_DATA_SIZE: raise InvalidInputDataSize('size: {} and max-size: {}'.format( diff --git a/hathor/transaction/transaction_metadata.py b/hathor/transaction/transaction_metadata.py index 9f70cff38..77bb54fab 100644 --- a/hathor/transaction/transaction_metadata.py +++ b/hathor/transaction/transaction_metadata.py @@ -13,7 +13,7 @@ # limitations under the License. from collections import defaultdict -from typing import TYPE_CHECKING, Any, Dict, FrozenSet, List, Optional, Set +from typing import TYPE_CHECKING, Any, Optional from hathor.transaction.validation_state import ValidationState from hathor.util import practically_equal @@ -27,13 +27,13 @@ class TransactionMetadata: hash: Optional[bytes] - spent_outputs: Dict[int, List[bytes]] + spent_outputs: dict[int, list[bytes]] # XXX: the following Optional[] types use None to replace empty set/list to reduce memory use - conflict_with: Optional[List[bytes]] - voided_by: Optional[Set[bytes]] - received_by: List[int] - children: List[bytes] - twins: List[bytes] + conflict_with: Optional[list[bytes]] + voided_by: Optional[set[bytes]] + received_by: list[int] + children: list[bytes] + twins: list[bytes] accumulated_weight: float score: float first_block: Optional[bytes] @@ -51,7 +51,7 @@ class TransactionMetadata: _last_voided_by_hash: Optional[int] _last_spent_by_hash: Optional[int] - def __init__(self, spent_outputs: Optional[Dict[int, List[bytes]]] = None, hash: Optional[bytes] = None, + def __init__(self, spent_outputs: Optional[dict[int, list[bytes]]] = None, hash: Optional[bytes] = None, accumulated_weight: float = 0, score: 
float = 0, height: int = 0, min_height: int = 0) -> None: from hathor.transaction.genesis import is_genesis @@ -188,8 +188,8 @@ def __eq__(self, other: Any) -> bool: return True - def to_json(self) -> Dict[str, Any]: - data: Dict[str, Any] = {} + def to_json(self) -> dict[str, Any]: + data: dict[str, Any] = {} data['hash'] = self.hash and self.hash.hex() data['spent_outputs'] = [] for idx, hashes in self.spent_outputs.items(): @@ -210,7 +210,7 @@ def to_json(self) -> Dict[str, Any]: data['validation'] = self.validation.name.lower() return data - def to_json_extended(self, tx_storage: 'TransactionStorage') -> Dict[str, Any]: + def to_json_extended(self, tx_storage: 'TransactionStorage') -> dict[str, Any]: data = self.to_json() first_block_height: Optional[int] if self.first_block is not None: @@ -222,7 +222,7 @@ def to_json_extended(self, tx_storage: 'TransactionStorage') -> Dict[str, Any]: return data @classmethod - def create_from_json(cls, data: Dict[str, Any]) -> 'TransactionMetadata': + def create_from_json(cls, data: dict[str, Any]) -> 'TransactionMetadata': from hathor.transaction.genesis import is_genesis meta = cls() @@ -288,7 +288,7 @@ def del_voided_by(self, item: bytes) -> None: if not self.voided_by: self.voided_by = None - def get_frozen_voided_by(self) -> FrozenSet[bytes]: + def get_frozen_voided_by(self) -> frozenset[bytes]: """Return a frozen set copy of voided_by.""" if self.voided_by is None: return frozenset() diff --git a/hathor/transaction/util.py b/hathor/transaction/util.py index fb9b1b9c7..e58b3d095 100644 --- a/hathor/transaction/util.py +++ b/hathor/transaction/util.py @@ -15,7 +15,7 @@ import re import struct from math import ceil, floor -from typing import Any, Callable, Optional, Tuple +from typing import Any, Callable, Optional from hathor.conf import HathorSettings @@ -46,7 +46,7 @@ def unpack(fmt: str, buf: bytes) -> Any: return struct.unpack(fmt, buf[:size]), buf[size:] -def unpack_len(n: int, buf: bytes) -> Tuple[bytes, bytes]: 
+def unpack_len(n: int, buf: bytes) -> tuple[bytes, bytes]: return buf[:n], buf[n:] diff --git a/hathor/util.py b/hathor/util.py index 7f9ed0344..41212e7b8 100644 --- a/hathor/util.py +++ b/hathor/util.py @@ -25,23 +25,7 @@ from enum import Enum from functools import partial, wraps from random import Random as PyRandom -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Deque, - Dict, - Iterable, - Iterator, - List, - Optional, - Sequence, - Tuple, - Type, - TypeVar, - Union, - cast, -) +from typing import TYPE_CHECKING, Any, Callable, Iterable, Iterator, Optional, Sequence, TypeVar, Union, cast from structlog import get_logger from twisted.internet import reactor as twisted_reactor @@ -78,7 +62,7 @@ Z = TypeVar('Z', bound=Interface) -def practically_equal(a: Dict[Any, Any], b: Dict[Any, Any]) -> bool: +def practically_equal(a: dict[Any, Any], b: dict[Any, Any]) -> bool: """ Compare two defaultdict. It is used because a simple access have side effects in defaultdict. @@ -181,7 +165,7 @@ def ichunks(array: bytes, chunk_size: int) -> Iterator[bytes]: return takewhile(bool, (bytes(islice(idata, chunk_size)) for _ in repeat(None))) -def iwindows(iterable: Iterable[T], window_size: int) -> Iterator[Tuple[T, ...]]: +def iwindows(iterable: Iterable[T], window_size: int) -> Iterator[tuple[T, ...]]: """ Adapt iterator to yield windows of the given size. 
window_size must be greater than 0 @@ -200,7 +184,7 @@ def iwindows(iterable: Iterable[T], window_size: int) -> Iterator[Tuple[T, ...]] from collections import deque it = iter(iterable) assert window_size > 0 - res_item: Deque[T] = deque() + res_item: deque[T] = deque() while len(res_item) < window_size: res_item.append(next(it)) yield tuple(res_item) @@ -255,7 +239,7 @@ def __setitem__(self, key, value): self.popitem(False) -def json_loadb(raw: bytes) -> Dict: +def json_loadb(raw: bytes) -> dict: """Compact loading as UTF-8 encoded bytes/string to a Python object.""" import json @@ -270,7 +254,7 @@ def json_loadb(raw: bytes) -> Dict: # XXX: cast-converting the function saves a function-call, which can make a difference -json_loads = cast(Callable[[str], Dict], json_loadb) +json_loads = cast(Callable[[str], dict], json_loadb) def json_dumpb(obj: object) -> bytes: @@ -347,7 +331,7 @@ def geometric(self, p: float) -> int: """ return math.ceil(math.log(self.random()) / math.log(1 - p)) - def ordered_sample(self, seq: Sequence[T], k: int) -> List[T]: + def ordered_sample(self, seq: Sequence[T], k: int) -> list[T]: """Like self.sample but preserve orginal order. For example, ordered_sample([1, 2, 3]) will never return [3, 2] only [2, 3] instead.""" @@ -360,7 +344,7 @@ def randbytes(self, n): return self.getrandbits(n * 8).to_bytes(n, 'little') -def collect_n(it: Iterator[_T], n: int) -> Tuple[List[_T], bool]: +def collect_n(it: Iterator[_T], n: int) -> tuple[list[_T], bool]: """Collect up to n elements from an iterator into a list, returns the list and whether there were more elements. 
This method will consume up to n+1 elements from the iterator because it will try to get one more element after it @@ -382,7 +366,7 @@ def collect_n(it: Iterator[_T], n: int) -> Tuple[List[_T], bool]: """ if n < 0: raise ValueError(f'n must be non-negative, got {n}') - col: List[_T] = [] + col: list[_T] = [] has_more = False while n > 0: try: @@ -430,7 +414,7 @@ def skip_n(it: Iterator[_T], n: int) -> Iterator[_T]: return it -def verified_cast(interface_class: Type[Z], obj: Any) -> Z: +def verified_cast(interface_class: type[Z], obj: Any) -> Z: verifyObject(interface_class, obj) return obj @@ -618,11 +602,11 @@ class peekable(Iterator[T]): def __init__(self, it: Iterable[T]) -> None: self._it: Optional[Iterator[T]] = iter(it) - # XXX: using Optional[Tuple[T]] makes it so the iterator can yield None, and it would be correctly peekable, + # XXX: using Optional[tuple[T]] makes it so the iterator can yield None, and it would be correctly peekable, # which is different from not having a next element to peek into - self._head: Optional[Tuple[T]] = None + self._head: Optional[tuple[T]] = None - def _peek(self) -> Optional[Tuple[T]]: + def _peek(self) -> Optional[tuple[T]]: if self._head is None and self._it is None: return None if self._head is None: diff --git a/hathor/utils/list.py b/hathor/utils/list.py index 37c60ffe2..020b4121e 100644 --- a/hathor/utils/list.py +++ b/hathor/utils/list.py @@ -12,12 +12,12 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import List, Optional, TypeVar +from typing import Optional, TypeVar T = TypeVar('T') -def single_or_none(_list: List[T]) -> Optional[T]: +def single_or_none(_list: list[T]) -> Optional[T]: """Function to convert a list with at most one element to the given element or None. 
>>> single_or_none([]) is None True diff --git a/hathor/utils/named_tuple.py b/hathor/utils/named_tuple.py index 2745cf3d9..e8064b7e4 100644 --- a/hathor/utils/named_tuple.py +++ b/hathor/utils/named_tuple.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Any, NamedTuple, Optional, Type, TypeVar +from typing import Any, NamedTuple, Optional, TypeVar import pydantic @@ -22,7 +22,7 @@ def validated_named_tuple_from_dict( - named_tuple_type: Type[T], + named_tuple_type: type[T], attributes_dict: dict[str, Any], *, validators: Optional[dict[str, classmethod]] = None diff --git a/hathor/wallet/base_wallet.py b/hathor/wallet/base_wallet.py index 32e7b8054..686f3dc74 100644 --- a/hathor/wallet/base_wallet.py +++ b/hathor/wallet/base_wallet.py @@ -17,7 +17,7 @@ from enum import Enum from itertools import chain from math import inf -from typing import Any, DefaultDict, Dict, Iterable, List, NamedTuple, Optional, Tuple, Union +from typing import Any, Iterable, NamedTuple, Optional, Union from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurvePrivateKey from pycoin.key.Key import Key @@ -69,7 +69,7 @@ class WalletBalanceUpdate(NamedTuple): class BaseWallet: reactor: Reactor - keys: Dict[str, Any] + keys: dict[str, Any] class WalletType(Enum): # Hierarchical Deterministic Wallet @@ -96,24 +96,24 @@ def __init__(self, directory: str = './', pubsub: Optional[PubSubManager] = None """ self.log = logger.new() - # Dict[token_id, Dict[Tuple[tx_id, index], UnspentTx]] - self.unspent_txs: DefaultDict[bytes, Dict[Tuple[bytes, int], UnspentTx]] = defaultdict(dict) + # dict[token_id, dict[tuple[tx_id, index], UnspentTx]] + self.unspent_txs: defaultdict[bytes, dict[tuple[bytes, int], UnspentTx]] = defaultdict(dict) - # Dict[token_id, Dict[Tuple[tx_id, index], UnspentTx]] - self.maybe_spent_txs: DefaultDict[bytes, Dict[Tuple[bytes, int], UnspentTx]] = defaultdict(dict) + # 
dict[token_id, dict[tuple[tx_id, index], UnspentTx]] + self.maybe_spent_txs: defaultdict[bytes, dict[tuple[bytes, int], UnspentTx]] = defaultdict(dict) - # Dict[Tuple(tx_id, index), List[SpentTx]] + # dict[tuple(tx_id, index), list[SpentTx]] # We have for each output, which txs spent it - self.spent_txs: Dict[Tuple[bytes, int], List['SpentTx']] = defaultdict(list) + self.spent_txs: dict[tuple[bytes, int], list['SpentTx']] = defaultdict(list) # Save each spent tx that was voided and is not spending tokens from this wallet anymore - self.voided_spent: Dict[Tuple[bytes, int], List['SpentTx']] = defaultdict(list) + self.voided_spent: dict[tuple[bytes, int], list['SpentTx']] = defaultdict(list) # Save each unspent tx that was voided and is not increasing the tokens of this wallet anymore - self.voided_unspent: Dict[Tuple[bytes, int], UnspentTx] = {} + self.voided_unspent: dict[tuple[bytes, int], UnspentTx] = {} # Wallet now has locked balance (with timelock) and available balance - self.balance: Dict[bytes, WalletBalance] = defaultdict(WalletBalance) + self.balance: dict[bytes, WalletBalance] = defaultdict(WalletBalance) # WalletBalanceUpdate object to store the callLater to update the balance self.balance_update: Optional[WalletBalanceUpdate] = None @@ -188,11 +188,11 @@ def tokens_received(self, address58: str) -> None: def get_private_key(self, address58: str) -> EllipticCurvePrivateKey: raise NotImplementedError - def get_input_aux_data(self, data_to_sign: bytes, private_key: Key) -> Tuple[bytes, bytes]: + def get_input_aux_data(self, data_to_sign: bytes, private_key: Key) -> tuple[bytes, bytes]: raise NotImplementedError - def prepare_transaction(self, cls: ABCMeta, inputs: List[WalletInputInfo], - outputs: List[WalletOutputInfo], timestamp: Optional[int] = None) -> Transaction: + def prepare_transaction(self, cls: ABCMeta, inputs: list[WalletInputInfo], + outputs: list[WalletOutputInfo], timestamp: Optional[int] = None) -> Transaction: """Prepares the tx inputs 
and outputs. Can be used to create blocks by passing empty list to inputs. @@ -201,17 +201,17 @@ def prepare_transaction(self, cls: ABCMeta, inputs: List[WalletInputInfo], :type cls: :py:class:`hathor.transaction.Block` or :py:class:`hathor.transaction.Transaction` :param inputs: the tx inputs - :type inputs: List[WalletInputInfo] + :type inputs: list[WalletInputInfo] :param outputs: the tx outputs - :type inputs: List[WalletOutputInfo] + :type inputs: list[WalletOutputInfo] :param timestamp: timestamp to use for the transaction :type timestamp: int """ tx_outputs = [] - token_dict: Dict[bytes, int] = {} # Dict[token_uid, index] - tokens = [] # List[bytes] = List[token_uid] + token_dict: dict[bytes, int] = {} # dict[token_uid, index] + tokens = [] # list[bytes] = list[token_uid] for txout in outputs: token_uid = bytes.fromhex(txout.token_uid) if token_uid == settings.HATHOR_TOKEN_UID: @@ -241,8 +241,8 @@ def prepare_transaction(self, cls: ABCMeta, inputs: List[WalletInputInfo], return tx - def prepare_transaction_incomplete_inputs(self, cls: ABCMeta, inputs: List[WalletInputInfo], - outputs: List[WalletOutputInfo], tx_storage: TransactionStorage, + def prepare_transaction_incomplete_inputs(self, cls: ABCMeta, inputs: list[WalletInputInfo], + outputs: list[WalletOutputInfo], tx_storage: TransactionStorage, force: bool = False, timestamp: Optional[int] = None) -> Transaction: """Uses prepare_transaction() to prepare transaction. 
@@ -257,10 +257,10 @@ def prepare_transaction_incomplete_inputs(self, cls: ABCMeta, inputs: List[Walle :type cls: Transaction or Block :param inputs: list of inputs of the tx - :type inputs: List[WalletInputInfo] + :type inputs: list[WalletInputInfo] :param outputs: list of outputs of the tx - :type outputs: List[WalletOutputInfo] + :type outputs: list[WalletOutputInfo] :param force: if True we will search the private key not only in the unspent txs this parameter, when set to True, can be used to allow a double spending @@ -278,12 +278,12 @@ def prepare_transaction_incomplete_inputs(self, cls: ABCMeta, inputs: List[Walle new_inputs = self.prepare_incomplete_inputs(inputs, tx_storage, force) return self.prepare_transaction(cls, new_inputs, outputs, timestamp) - def prepare_incomplete_inputs(self, inputs: List[WalletInputInfo], tx_storage: TransactionStorage, - force: bool = False) -> List[WalletInputInfo]: + def prepare_incomplete_inputs(self, inputs: list[WalletInputInfo], tx_storage: TransactionStorage, + force: bool = False) -> list[WalletInputInfo]: """Adds the keys to the inputs :param inputs: list of inputs of the tx - :type inputs: List[WalletInputInfo] + :type inputs: list[WalletInputInfo] :param force: if True we will search the private key not only in the unspent txs this parameter, when set to True, can be used to allow a double spending @@ -298,7 +298,7 @@ def prepare_incomplete_inputs(self, inputs: List[WalletInputInfo], tx_storage: T if len(inputs) != len(set(inputs)): # Same input is used more than once raise InputDuplicated - new_inputs: List[WalletInputInfo] = [] + new_inputs: list[WalletInputInfo] = [] for _input in inputs: new_input = None output_tx = tx_storage.get_transaction(_input.tx_id) @@ -329,7 +329,7 @@ def prepare_incomplete_inputs(self, inputs: List[WalletInputInfo], tx_storage: T return new_inputs def prepare_transaction_compute_inputs( - self, cls: ABCMeta, outputs: List[WalletOutputInfo], + self, cls: ABCMeta, outputs: 
list[WalletOutputInfo], tx_storage: 'TransactionStorage', timestamp: Optional[int] = None ) -> Transaction: """Calculates the inputs given the outputs and uses prepare_transaction() to prepare @@ -339,7 +339,7 @@ def prepare_transaction_compute_inputs( :type cls: :py:class:`hathor.transaction.Block` or :py:class:`hathor.transaction.Transaction` :param outputs: the tx outputs - :type outputs: List[WalletOutputInfo] + :type outputs: list[WalletOutputInfo] :param timestamp: the tx timestamp :type timestamp: int @@ -348,17 +348,17 @@ def prepare_transaction_compute_inputs( return self.prepare_transaction(cls, inputs, outputs, timestamp) def prepare_compute_inputs( - self, outputs: List[WalletOutputInfo], tx_storage: 'TransactionStorage', timestamp: Optional[int] = None - ) -> Tuple[List[WalletInputInfo], List[WalletOutputInfo]]: + self, outputs: list[WalletOutputInfo], tx_storage: 'TransactionStorage', timestamp: Optional[int] = None + ) -> tuple[list[WalletInputInfo], list[WalletOutputInfo]]: """Calculates the inputs given the outputs. Handles change. 
:param outputs: the tx outputs - :type outputs: List[WalletOutputInfo] + :type outputs: list[WalletOutputInfo] :param timestamp: the tx timestamp :type timestamp: int """ - token_dict: Dict[bytes, int] = defaultdict(int) + token_dict: dict[bytes, int] = defaultdict(int) for output in outputs: token_uid = bytes.fromhex(output.token_uid) token_dict[token_uid] += output.value @@ -376,21 +376,21 @@ def prepare_compute_inputs( tx_inputs.extend(inputs) return tx_inputs, outputs - def separate_inputs(self, inputs: List['TxInput'], - tx_storage: 'TransactionStorage') -> Tuple[List['TxInput'], List['TxInput']]: + def separate_inputs(self, inputs: list['TxInput'], + tx_storage: 'TransactionStorage') -> tuple[list['TxInput'], list['TxInput']]: """Separates the inputs from a tx into 2 groups: the ones that belong to this wallet and the ones that don't :param inputs: transaction to decode - :type inputs: List[py:class:`hathor.transaction.TxInput`] + :type inputs: list[py:class:`hathor.transaction.TxInput`] :return my_inputs: list of all inputs belonging to this wallet - :rtype my_inputs: List[py:class:`hathor.transaction.TxInput`] + :rtype my_inputs: list[py:class:`hathor.transaction.TxInput`] :param tx_storage: storage to search for output tx :type tx_storage: TransactionStorage :return other_inputs: list of all inputs NOT belonging to this wallet - :rtype other_inputs: List[py:class:`hathor.transaction.TxInput`] + :rtype other_inputs: list[py:class:`hathor.transaction.TxInput`] """ my_inputs = [] other_inputs = [] @@ -449,7 +449,7 @@ def handle_change_tx(self, sum_inputs: int, sum_outputs: int, def get_inputs_from_amount( self, amount: int, tx_storage: 'TransactionStorage', token_uid: bytes = settings.HATHOR_TOKEN_UID, max_ts: Optional[int] = None - ) -> Tuple[List[WalletInputInfo], int]: + ) -> tuple[list[WalletInputInfo], int]: """Creates inputs from our pool of unspent tx given a value This is a very simple algorithm, so it does not try to find the best combination @@ 
-827,12 +827,12 @@ def on_tx_winner(self, tx: Transaction) -> None: # publish update history self.publish_update(HathorEvents.WALLET_HISTORY_UPDATED) - def get_history(self, count: int = 10, page: int = 1) -> Tuple[List[Union['SpentTx', 'UnspentTx']], int]: + def get_history(self, count: int = 10, page: int = 1) -> tuple[list[Union['SpentTx', 'UnspentTx']], int]: """Return the last transactions in this wallet ordered by timestamp and the total :rtype: tuple[list[SpentTx, UnspentTx], int] """ - history: List[Union['SpentTx', 'UnspentTx']] = [] + history: list[Union['SpentTx', 'UnspentTx']] = [] for obj_dict in self.unspent_txs.values(): history += obj_dict.values() @@ -919,8 +919,8 @@ def should_schedule_update(self, smallest_timestamp: float) -> None: # If dont have any other timelock, set balance update to None self.balance_update = None - def match_inputs(self, inputs: List[TxInput], - tx_storage: TransactionStorage) -> Iterable[Tuple[TxInput, Optional[str]]]: + def match_inputs(self, inputs: list[TxInput], + tx_storage: TransactionStorage) -> Iterable[tuple[TxInput, Optional[str]]]: """Returns an iterable with the inputs that belong and don't belong to this wallet :return: An iterable with the inputs and corresponding address, if it belongs to this wallet @@ -955,8 +955,8 @@ def __init__(self, tx_id: bytes, index: int, value: int, timestamp: int, address self.test_used = False # flag to prevent twin txs being created (for tests only!!) 
self.maybe_spent_ts = inf - def to_dict(self) -> Dict[str, Any]: - data: Dict[str, Any] = {} + def to_dict(self) -> dict[str, Any]: + data: dict[str, Any] = {} data['timestamp'] = self.timestamp data['tx_id'] = self.tx_id.hex() data['index'] = self.index @@ -968,7 +968,7 @@ def to_dict(self) -> Dict[str, Any]: return data @classmethod - def from_dict(cls, data: Dict[str, Any]) -> 'UnspentTx': + def from_dict(cls, data: dict[str, Any]) -> 'UnspentTx': return cls(bytes.fromhex(data['tx_id']), data['index'], data['value'], data['timestamp'], data['address'], data['token_data'], data['voided'], data['timelock']) @@ -1005,8 +1005,8 @@ def __init__(self, tx_id: bytes, from_tx_id: bytes, from_index: int, value: int, self.timestamp = timestamp self.voided = voided - def to_dict(self) -> Dict[str, Any]: - data: Dict[str, Any] = {} + def to_dict(self) -> dict[str, Any]: + data: dict[str, Any] = {} data['timestamp'] = self.timestamp data['tx_id'] = self.tx_id.hex() data['from_tx_id'] = self.from_tx_id.hex() @@ -1016,7 +1016,7 @@ def to_dict(self) -> Dict[str, Any]: return data @classmethod - def from_dict(cls, data: Dict[str, Any]) -> 'SpentTx': + def from_dict(cls, data: dict[str, Any]) -> 'SpentTx': return cls( bytes.fromhex(data['tx_id']), bytes.fromhex(data['from_tx_id']), data['from_index'], data['value'], data['timestamp']) diff --git a/hathor/wallet/hd_wallet.py b/hathor/wallet/hd_wallet.py index b3dcca1bf..3773ed4df 100644 --- a/hathor/wallet/hd_wallet.py +++ b/hathor/wallet/hd_wallet.py @@ -13,7 +13,7 @@ # limitations under the License. 
import hashlib -from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple +from typing import TYPE_CHECKING, Any, Optional from mnemonic import Mnemonic @@ -86,8 +86,8 @@ def __init__(self, *, words: Optional[Any] = None, language: str = 'english', pa """ super().__init__(directory=directory, pubsub=pubsub, reactor=reactor) - # Dict[string(base58), BIP32Key] - self.keys: Dict[str, Any] = {} + # dict[string(base58), BIP32Key] + self.keys: dict[str, Any] = {} # Last index that the address was shared # We use this index to know which address should be shared with the user @@ -311,7 +311,7 @@ def validate_words(self): if not self.words or not self.mnemonic.check(self.words): raise InvalidWords - def get_input_aux_data(self, data_to_sign: bytes, private_key: 'Key') -> Tuple[bytes, bytes]: + def get_input_aux_data(self, data_to_sign: bytes, private_key: 'Key') -> tuple[bytes, bytes]: """ Sign the data to be used in input and get public key compressed in bytes :param data_to_sign: Data to be signed diff --git a/hathor/wallet/keypair.py b/hathor/wallet/keypair.py index d48e3557f..d526e1c48 100644 --- a/hathor/wallet/keypair.py +++ b/hathor/wallet/keypair.py @@ -13,7 +13,7 @@ # limitations under the License. 
import base64 -from typing import Any, Dict, Optional +from typing import Any, Optional from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import serialization @@ -84,7 +84,7 @@ def get_private_key(self, password: bytes) -> ec.EllipticCurvePrivateKey: raise IncorrectPassword return priv_key - def to_json(self) -> Dict[str, Any]: + def to_json(self) -> dict[str, Any]: return { 'privKey': self.get_private_key_b64(), 'address': self.address, @@ -92,7 +92,7 @@ def to_json(self) -> Dict[str, Any]: } @classmethod - def from_json(cls, json_data: Dict[str, Any]) -> 'KeyPair': + def from_json(cls, json_data: dict[str, Any]) -> 'KeyPair': priv_key_bytes = base64.b64decode(json_data['privKey']) address = json_data['address'] used = json_data['used'] diff --git a/hathor/wallet/resources/lock.py b/hathor/wallet/resources/lock.py index 430977db4..7417934e4 100644 --- a/hathor/wallet/resources/lock.py +++ b/hathor/wallet/resources/lock.py @@ -33,7 +33,7 @@ def render_POST(self, request): """ Lock the wallet :return: Boolean if the user locked the wallet with success - :rtype: string (json) Dict['success', bool] + :rtype: string (json) dict['success', bool] """ request.setHeader(b'content-type', b'application/json; charset=utf-8') set_cors(request, 'POST') diff --git a/hathor/wallet/resources/nano_contracts/execute.py b/hathor/wallet/resources/nano_contracts/execute.py index 059a50dbf..d08bd4c5a 100644 --- a/hathor/wallet/resources/nano_contracts/execute.py +++ b/hathor/wallet/resources/nano_contracts/execute.py @@ -15,7 +15,7 @@ import base64 import binascii from json import JSONDecodeError -from typing import Any, Dict, NamedTuple +from typing import Any, NamedTuple from hathor.api_util import Resource, get_missing_params_msg, render_options, set_cors from hathor.cli.openapi_files.register import register_resource @@ -109,7 +109,7 @@ def render_POST(self, request): def render_OPTIONS(self, request): return render_options(request) - def 
decode_params(self, data: Dict[str, Any]) -> DecodedParams: + def decode_params(self, data: dict[str, Any]) -> DecodedParams: """Decode the data required for execute operation. Raise an error if any of the fields is not of the expected type. """ diff --git a/hathor/wallet/resources/nano_contracts/match_value.py b/hathor/wallet/resources/nano_contracts/match_value.py index 72c904812..d930f3935 100644 --- a/hathor/wallet/resources/nano_contracts/match_value.py +++ b/hathor/wallet/resources/nano_contracts/match_value.py @@ -16,7 +16,7 @@ import binascii import struct from json import JSONDecodeError -from typing import Any, Dict, NamedTuple +from typing import Any, NamedTuple from hathor.api_util import Resource, get_missing_params_msg, render_options, set_cors from hathor.cli.openapi_files.register import register_resource @@ -32,7 +32,7 @@ class DecodedPostParams(NamedTuple): - value_dict: Dict[bytes, int] + value_dict: dict[bytes, int] fallback_address: bytes min_timestamp: int oracle_pubkey_hash: bytes @@ -42,7 +42,7 @@ class DecodedPostParams(NamedTuple): class DecodedPutParams(NamedTuple): - new_value_dict: Dict[bytes, int] + new_value_dict: dict[bytes, int] input_value: int tx_bytes: bytes @@ -62,7 +62,7 @@ def render_POST(self, request): """ Creates a nano contract tx and returns it in hexadecimal format. Post data should be a json with the following items: - values: List[{'address', 'value'}], with bet address and value + values: list[{'address', 'value'}], with bet address and value fallback_address: if none of the addresses above is the winner, this address can execute the contract oracle_pubkey_hash: oracle's public key hashed @@ -111,7 +111,7 @@ def render_POST(self, request): ret = {'success': True, 'hex_tx': tx.get_struct().hex()} return json_dumpb(ret) - def decode_post_params(self, data: Dict[str, Any]) -> DecodedPostParams: + def decode_post_params(self, data: dict[str, Any]) -> DecodedPostParams: """Decode the data required on POST request. 
Raise an error if any of the fields is not of the expected type. """ @@ -159,7 +159,7 @@ def render_PUT(self, request): Post data should be a json with the following items: hex_tx: tx being updated, in hex value - new_values: List[{'address', 'value'}], with bet address and value + new_values: list[{'address', 'value'}], with bet address and value input_value: amount this wallet should stake in the nano contract :rtype: string (json) @@ -220,7 +220,7 @@ def render_PUT(self, request): ret = {'success': True, 'hex_tx': tx.get_struct().hex()} return json_dumpb(ret) - def decode_put_params(self, data: Dict[str, Any]) -> DecodedPutParams: + def decode_put_params(self, data: dict[str, Any]) -> DecodedPutParams: """Decode the data required on PUT request. Raise an error if any of the fields is not of the expected type. """ diff --git a/hathor/wallet/resources/send_tokens.py b/hathor/wallet/resources/send_tokens.py index ffbe96093..703857e27 100644 --- a/hathor/wallet/resources/send_tokens.py +++ b/hathor/wallet/resources/send_tokens.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import Any, Dict, Optional, Union +from typing import Any, Optional, Union from twisted.internet import threads from twisted.web.http import Request @@ -118,7 +118,7 @@ def render_POST(self, request): from twisted.web.server import NOT_DONE_YET return NOT_DONE_YET - def _render_POST_thread(self, values: Dict[str, Any], request: Request) -> Union[bytes, Transaction]: + def _render_POST_thread(self, values: dict[str, Any], request: Request) -> Union[bytes, Transaction]: tx = self.manager.wallet.prepare_transaction(Transaction, values['inputs'], values['outputs'], values['timestamp']) tx.storage = values['storage'] diff --git a/hathor/wallet/resources/thin_wallet/address_balance.py b/hathor/wallet/resources/thin_wallet/address_balance.py index 3af76df3a..20cecdf3b 100644 --- a/hathor/wallet/resources/thin_wallet/address_balance.py +++ b/hathor/wallet/resources/thin_wallet/address_balance.py @@ -13,7 +13,7 @@ # limitations under the License. from collections import defaultdict -from typing import TYPE_CHECKING, Any, Dict +from typing import TYPE_CHECKING, Any from twisted.web.http import Request @@ -100,7 +100,7 @@ def render_GET(self, request: Request) -> bytes: 'message': 'Invalid \'address\' parameter' }) - tokens_data: Dict[bytes, TokenData] = defaultdict(TokenData) + tokens_data: dict[bytes, TokenData] = defaultdict(TokenData) tx_hashes = addresses_index.get_from_address(requested_address) for tx_hash in tx_hashes: tx = self.manager.tx_storage.get_transaction(tx_hash) @@ -121,7 +121,7 @@ def render_GET(self, request: Request) -> bytes: token_uid = tx.get_token_uid(tx_output.get_token_index()) tokens_data[token_uid].received += tx_output.value - return_tokens_data: Dict[str, Dict[str, Any]] = {} + return_tokens_data: dict[str, dict[str, Any]] = {} for token_uid in tokens_data.keys(): if token_uid == settings.HATHOR_TOKEN_UID: tokens_data[token_uid].name = settings.HATHOR_TOKEN_NAME diff --git a/hathor/wallet/resources/thin_wallet/address_history.py 
b/hathor/wallet/resources/thin_wallet/address_history.py index 2bcb885e3..b0dc800b8 100644 --- a/hathor/wallet/resources/thin_wallet/address_history.py +++ b/hathor/wallet/resources/thin_wallet/address_history.py @@ -13,7 +13,7 @@ # limitations under the License. from json import JSONDecodeError -from typing import Any, Dict, List, Optional, Set +from typing import Any, Optional from twisted.web.http import Request @@ -144,7 +144,7 @@ def render_GET(self, request: Request) -> bytes: # Old and deprecated resource return self.deprecated_resource(request) - def get_address_history(self, addresses: List[str], ref_hash: Optional[str]) -> bytes: + def get_address_history(self, addresses: list[str], ref_hash: Optional[str]) -> bytes: ref_hash_bytes = None if ref_hash: try: @@ -166,7 +166,7 @@ def get_address_history(self, addresses: List[str], ref_hash: Optional[str]) -> total_elements = 0 history = [] - seen: Set[bytes] = set() + seen: set[bytes] = set() # XXX In this algorithm we need to sort all transactions of an address # and find one specific (in case of a pagination request) # so if this address has many txs, this could become slow @@ -234,7 +234,7 @@ def get_address_history(self, addresses: List[str], ref_hash: Optional[str]) -> first_address = address break - data: Dict[str, Any] = { + data: dict[str, Any] = { 'success': True, 'history': history, 'has_more': has_more, @@ -255,7 +255,7 @@ def deprecated_resource(self, request: Request) -> bytes: addresses = raw_args[b'addresses[]'] history = [] - seen: Set[bytes] = set() + seen: set[bytes] = set() for address_to_decode in addresses: address = address_to_decode.decode('utf-8') try: diff --git a/hathor/wallet/resources/thin_wallet/tokens.py b/hathor/wallet/resources/thin_wallet/tokens.py index 2ad8b46ec..6e2789f01 100644 --- a/hathor/wallet/resources/thin_wallet/tokens.py +++ b/hathor/wallet/resources/thin_wallet/tokens.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # 
limitations under the License. -from typing import Any, Dict +from typing import Any from twisted.web.http import Request @@ -35,7 +35,7 @@ class TokenResource(Resource): def __init__(self, manager): self.manager = manager - def get_one_token_data(self, token_uid: bytes) -> Dict[str, Any]: + def get_one_token_data(self, token_uid: bytes) -> dict[str, Any]: # Get one token data specified in id tokens_index = self.manager.tx_storage.indexes.tokens try: @@ -71,7 +71,7 @@ def get_one_token_data(self, token_uid: bytes) -> Dict[str, Any]: } return data - def get_list_token_data(self) -> Dict[str, Any]: + def get_list_token_data(self) -> dict[str, Any]: # XXX We should change this in the future so we don't return all tokens in one request # XXX Right now, the way we have the tokens index is not easy to do it but in the future # XXX when the number of tokens grow we should refactor this resource diff --git a/hathor/wallet/resources/unlock.py b/hathor/wallet/resources/unlock.py index 55e820a51..829e68a58 100644 --- a/hathor/wallet/resources/unlock.py +++ b/hathor/wallet/resources/unlock.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import Any, Dict +from typing import Any from hathor.api_util import Resource, render_options, set_cors from hathor.cli.openapi_files.register import register_resource @@ -40,7 +40,7 @@ def render_POST(self, request): :type password: string :return: Boolean if the user unlocked the wallet with success - :rtype: string (json) Dict['success', bool] + :rtype: string (json) dict['success', bool] """ request.setHeader(b'content-type', b'application/json; charset=utf-8') set_cors(request, 'POST') @@ -53,13 +53,13 @@ def render_POST(self, request): # Wallet HD return self.unlock_wallet_hd(post_data) - def unlock_wallet_hd(self, data: Dict[str, Any]) -> bytes: + def unlock_wallet_hd(self, data: dict[str, Any]) -> bytes: words = None if 'words' in data: words = data['words'] passphrase = bytes(data['passphrase'], 'utf-8') - ret: Dict[str, Any] = {'success': True} + ret: dict[str, Any] = {'success': True} try: ret_words = self.manager.wallet.unlock(self.manager.tx_storage, words, passphrase) @@ -72,9 +72,9 @@ def unlock_wallet_hd(self, data: Dict[str, Any]) -> bytes: return json_dumpb(ret) - def unlock_wallet_keypair(self, data: Dict[str, Any]) -> bytes: + def unlock_wallet_keypair(self, data: dict[str, Any]) -> bytes: password = bytes(data['password'], 'utf-8') - ret: Dict[str, Any] = {} + ret: dict[str, Any] = {} success = True try: diff --git a/hathor/wallet/util.py b/hathor/wallet/util.py index 917edaf98..d8c1fa3a7 100644 --- a/hathor/wallet/util.py +++ b/hathor/wallet/util.py @@ -13,7 +13,7 @@ # limitations under the License. 
import hashlib -from typing import List, Optional +from typing import Optional import base58 from cryptography.hazmat.primitives import hashes @@ -27,7 +27,7 @@ settings = HathorSettings() -def generate_multisig_redeem_script(signatures_required: int, public_key_bytes: List[bytes]) -> bytes: +def generate_multisig_redeem_script(signatures_required: int, public_key_bytes: list[bytes]) -> bytes: """ Generate the redeem script for the multisig output ... @@ -36,7 +36,7 @@ def generate_multisig_redeem_script(signatures_required: int, public_key_bytes: :type signatures_required: int :param public_key_bytes: Array of public keys that created the multisig wallet - :type public_key_bytes: List[bytes] + :type public_key_bytes: list[bytes] :return: The redeem script for the multisig wallet :rtype: bytes diff --git a/hathor/wallet/wallet.py b/hathor/wallet/wallet.py index fbbe0a1d0..55ce19211 100644 --- a/hathor/wallet/wallet.py +++ b/hathor/wallet/wallet.py @@ -15,7 +15,7 @@ import hashlib import json import os -from typing import Any, Dict, Optional, Tuple +from typing import Any, Optional from cryptography.hazmat.backends.openssl.ec import _EllipticCurvePrivateKey from cryptography.hazmat.primitives import hashes @@ -39,7 +39,7 @@ def __init__(self, keys: Optional[Any] = None, directory: str = './', filename: only contain wallet associated files. 
:param keys: keys to initialize this wallet - :type keys: Dict[string(base58), :py:class:`hathor.wallet.keypair.KeyPair`] + :type keys: dict[string(base58), :py:class:`hathor.wallet.keypair.KeyPair`] :param directory: where to store wallet associated files :type directory: string @@ -53,9 +53,9 @@ def __init__(self, keys: Optional[Any] = None, directory: str = './', filename: super().__init__(directory=directory, pubsub=pubsub, reactor=reactor) self.filepath = os.path.join(directory, filename) - self.keys: Dict[str, Any] = keys or {} # Dict[string(b58_address), KeyPair] + self.keys: dict[str, Any] = keys or {} # dict[string(b58_address), KeyPair] - # Set[string(base58)] + # set[string(base58)] self.unused_keys = set(key.address for key in self.keys.values() if not key.used) self.password: Optional[bytes] = None @@ -204,7 +204,7 @@ def tokens_received(self, address58: str) -> None: def is_locked(self): return self.password is None - def get_input_aux_data(self, data_to_sign: bytes, private_key: _EllipticCurvePrivateKey) -> Tuple[bytes, bytes]: + def get_input_aux_data(self, data_to_sign: bytes, private_key: _EllipticCurvePrivateKey) -> tuple[bytes, bytes]: """ Sign the data to be used in input and get public key compressed in bytes :param data_to_sign: Data to be signed diff --git a/hathor/websocket/factory.py b/hathor/websocket/factory.py index 4e672d7c1..1a797189d 100644 --- a/hathor/websocket/factory.py +++ b/hathor/websocket/factory.py @@ -13,7 +13,7 @@ # limitations under the License. 
from collections import defaultdict, deque -from typing import Any, DefaultDict, Deque, Dict, Optional, Set, Union +from typing import Any, Optional, Union from autobahn.exception import Disconnected from autobahn.twisted.websocket import WebSocketServerFactory @@ -37,7 +37,7 @@ # max_hits (int) and hits_window_seconds (int): together they define the Rate Limit # It's how many hits can this message make in the window interval -CONTROLLED_TYPES: Dict[str, Dict[str, Any]] = { +CONTROLLED_TYPES: dict[str, dict[str, Any]] = { HathorEvents.NETWORK_NEW_TX_ACCEPTED.value: { 'buffer_size': 20, 'time_buffering': 0.1, @@ -91,16 +91,16 @@ def __init__(self, metrics: Optional[Metrics] = None, address_index: Optional[Ad """ # Opened websocket connections so I can broadcast messages later # It contains only connections that have finished handshaking. - self.connections: Set[HathorAdminWebsocketProtocol] = set() + self.connections: set[HathorAdminWebsocketProtocol] = set() # Websocket connection for each address - self.address_connections: DefaultDict[str, Set[HathorAdminWebsocketProtocol]] = defaultdict(set) + self.address_connections: defaultdict[str, set[HathorAdminWebsocketProtocol]] = defaultdict(set) super().__init__() # Limit the send message rate for specific type of data self.rate_limiter = RateLimiter(reactor=reactor) # Stores the buffer of messages that exceeded the rate limit and will be sent - self.buffer_deques: Dict[str, Deque[Dict[str, Any]]] = {} + self.buffer_deques: dict[str, deque[dict[str, Any]]] = {} self.metrics = metrics self.address_index = address_index @@ -174,7 +174,7 @@ def handle_publish(self, key, args): data['type'] = key.value self.send_or_enqueue(data) - def serialize_message_data(self, event: HathorEvents, args: EventArguments) -> Dict[str, Any]: + def serialize_message_data(self, event: HathorEvents, args: EventArguments) -> dict[str, Any]: """ Receives the event and the args from the pubsub and serializes the data so it can be passed in the 
websocket """ @@ -209,7 +209,7 @@ def serialize_message_data(self, event: HathorEvents, args: EventArguments) -> D else: raise ValueError('Should never have entered here! We dont know this event') - def execute_send(self, data: Dict[str, Any], connections: Set[HathorAdminWebsocketProtocol]) -> None: + def execute_send(self, data: dict[str, Any], connections: set[HathorAdminWebsocketProtocol]) -> None: """ Send data in ws message for the connections """ try: @@ -228,12 +228,12 @@ def execute_send(self, data: Dict[str, Any], connections: Set[HathorAdminWebsock except Exception: self.log.error('send failed, moving on', exc_info=True) - def broadcast_message(self, data: Dict[str, Any]) -> None: + def broadcast_message(self, data: dict[str, Any]) -> None: """ Broadcast the update message to the connections """ self.execute_send(data, self.connections) - def send_message(self, data: Dict[str, Any]) -> None: + def send_message(self, data: dict[str, Any]) -> None: """ Check if should broadcast the message to all connections or send directly to some connections only """ if data['type'] in ADDRESS_EVENTS: @@ -250,7 +250,7 @@ def send_or_enqueue(self, data): Rate limits change according to the message type, which is obtained from data['type']. 
:param data: message to be sent - :type data: Dict[string, X] -> X can be different types, depending on the type of message + :type data: dict[string, X] -> X can be different types, depending on the type of message """ if data['type'] in CONTROLLED_TYPES: # This type is controlled, so I need to check the deque @@ -268,7 +268,7 @@ def enqueue_for_later(self, data): If this deque is not programed to be called later yet, we call it :param data: message to be sent - :type data: Dict[string, X] -> X can be different types, depending on the type of message + :type data: dict[string, X] -> X can be different types, depending on the type of message """ # Add data to deque # We always add the new messages in the end @@ -283,7 +283,7 @@ def enqueue_for_later(self, data): def process_deque(self, data_type): """ Process the deque and check if I have limit to send the messages now - :param data_type: Type of the message to be sent + :param data_type: type of the message to be sent :type data_type: string """ while len(self.buffer_deques[data_type]) > 0: @@ -312,15 +312,15 @@ def handle_message(self, connection: HathorAdminWebsocketProtocol, data: Union[b elif message['type'] == 'unsubscribe_address': self._handle_unsubscribe_address(connection, message) - def _handle_ping(self, connection: HathorAdminWebsocketProtocol, message: Dict[Any, Any]) -> None: + def _handle_ping(self, connection: HathorAdminWebsocketProtocol, message: dict[Any, Any]) -> None: """ Handler for ping message, should respond with a simple {"type": "pong"}""" payload = json_dumpb({'type': 'pong'}) connection.sendMessage(payload, False) - def _handle_subscribe_address(self, connection: HathorAdminWebsocketProtocol, message: Dict[Any, Any]) -> None: + def _handle_subscribe_address(self, connection: HathorAdminWebsocketProtocol, message: dict[Any, Any]) -> None: """ Handler for subscription to an address, consideirs subscription limits.""" addr: str = message['address'] - subs: Set[str] = 
connection.subscribed_to + subs: set[str] = connection.subscribed_to if self.max_subs_addrs_conn is not None and len(subs) >= self.max_subs_addrs_conn: payload = json_dumpb({'message': 'Reached maximum number of subscribed ' f'addresses ({self.max_subs_addrs_conn}).', @@ -337,7 +337,7 @@ def _handle_subscribe_address(self, connection: HathorAdminWebsocketProtocol, me payload = json_dumpb({'type': 'subscribe_address', 'success': True}) connection.sendMessage(payload, False) - def _handle_unsubscribe_address(self, connection: HathorAdminWebsocketProtocol, message: Dict[Any, Any]) -> None: + def _handle_unsubscribe_address(self, connection: HathorAdminWebsocketProtocol, message: dict[Any, Any]) -> None: """ Handler for unsubscribing from an address, also removes address connection set if it ends up empty.""" addr = message['address'] if addr in self.address_connections and connection in self.address_connections[addr]: @@ -368,6 +368,6 @@ def on_client_close(self, connection: HathorAdminWebsocketProtocol) -> None: self._remove_connection_from_address_dict(connection, address) -def _count_empty(addresses: Set[str], address_index: AddressIndex) -> int: +def _count_empty(addresses: set[str], address_index: AddressIndex) -> int: """ Count how many of the addresses given are empty (have no outputs).""" return sum(1 for addr in addresses if address_index.is_address_empty(addr)) diff --git a/tests/event/test_simulation.py b/tests/event/test_simulation.py index 57b4742cd..c2e299f9f 100644 --- a/tests/event/test_simulation.py +++ b/tests/event/test_simulation.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import List from unittest.mock import Mock import pytest @@ -420,7 +419,7 @@ def _assert_equal_events(actual_events, expected_events): f'actual: {actual_events_chunk}' -def _sorted_by_hash_without_id(events: List[BaseEvent]) -> List[BaseEvent]: +def _sorted_by_hash_without_id(events: list[BaseEvent]) -> list[BaseEvent]: events_without_id = [event.copy(exclude={'id'}) for event in events] def key(event: BaseEvent) -> str: diff --git a/tests/others/test_cli_builder.py b/tests/others/test_cli_builder.py index 447aacfd6..ddb67eaae 100644 --- a/tests/others/test_cli_builder.py +++ b/tests/others/test_cli_builder.py @@ -1,5 +1,3 @@ -from typing import List - import pytest from hathor.builder import CliBuilder, ResourcesBuilder @@ -26,7 +24,7 @@ def setUp(self): self.parser = RunNode.create_parser() self.builder = CliBuilder() - def _build_with_error(self, cmd_args: List[str], err_msg: str) -> None: + def _build_with_error(self, cmd_args: list[str], err_msg: str) -> None: args = self.parser.parse_args(cmd_args) with self.assertRaises(BuilderError) as cm: manager = self.builder.create_manager(self.reactor, args) @@ -34,7 +32,7 @@ def _build_with_error(self, cmd_args: List[str], err_msg: str) -> None: self.resources_builder.build(args) self.assertEqual(err_msg, str(cm.exception)) - def _build(self, cmd_args: List[str]) -> HathorManager: + def _build(self, cmd_args: list[str]) -> HathorManager: args = self.parser.parse_args(cmd_args) manager = self.builder.create_manager(self.reactor, args) self.assertIsNotNone(manager) diff --git a/tests/resources/transaction/test_pushtx.py b/tests/resources/transaction/test_pushtx.py index ba23fedb9..314e7445f 100644 --- a/tests/resources/transaction/test_pushtx.py +++ b/tests/resources/transaction/test_pushtx.py @@ -1,4 +1,4 @@ -from typing import Generator, List, Optional +from typing import Generator, Optional from twisted.internet.defer import inlineCallbacks @@ -29,8 +29,8 @@ def setUp(self): self.web = 
StubSite(PushTxResource(self.manager)) self.web_tokens = StubSite(SendTokensResource(self.manager)) - def get_tx(self, inputs: Optional[List[WalletInputInfo]] = None, - outputs: Optional[List[WalletOutputInfo]] = None) -> Transaction: + def get_tx(self, inputs: Optional[list[WalletInputInfo]] = None, + outputs: Optional[list[WalletOutputInfo]] = None) -> Transaction: if not outputs: address = self.get_address(0) assert address is not None diff --git a/tests/sysctl/test_sysctl.py b/tests/sysctl/test_sysctl.py index 6e0e80ab5..55137105c 100644 --- a/tests/sysctl/test_sysctl.py +++ b/tests/sysctl/test_sysctl.py @@ -34,7 +34,7 @@ def setUp(self) -> None: ) net.register( 'rate_limit', - MagicMock(return_value=(4, 1)), # Tuple[int, float] + MagicMock(return_value=(4, 1)), # tuple[int, float] MagicMock(), ) core = Sysctl() diff --git a/tests/tx/test_merged_mining.py b/tests/tx/test_merged_mining.py index 8a7fee2f4..ebf032bb1 100644 --- a/tests/tx/test_merged_mining.py +++ b/tests/tx/test_merged_mining.py @@ -1,5 +1,5 @@ import asyncio -from typing import Dict, List, Optional +from typing import Optional from hathor.client import HathorClientStub from hathor.merged_mining import MergedMiningCoordinator @@ -51,9 +51,9 @@ class BitcoinRPCStub(IBitcoinRPC): def __init__(self, response_delay: float = 0.01): self.response_delay = response_delay - async def get_block_template(self, *, rules: List[str] = ['segwit'], longpoll_id: Optional[str], - capabilities: List[str] = ['coinbasetxn', 'workid', 'coinbase/append', 'longpoll'], - ) -> Dict: + async def get_block_template(self, *, rules: list[str] = ['segwit'], longpoll_id: Optional[str], + capabilities: list[str] = ['coinbasetxn', 'workid', 'coinbase/append', 'longpoll'], + ) -> dict: stub = { 'capabilities': ['proposal'], 'version': 536870912, diff --git a/tests/unittest.py b/tests/unittest.py index 476e8bff7..485221402 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -2,7 +2,7 @@ import shutil import tempfile import 
time -from typing import Iterator, List, Optional +from typing import Iterator, Optional from unittest import main as ut_main from structlog import get_logger @@ -119,10 +119,10 @@ def tearDown(self): def reset_peer_id_pool(self) -> None: self._free_peer_id_pool = self.new_peer_id_pool() - def new_peer_id_pool(self) -> List[PeerId]: + def new_peer_id_pool(self) -> list[PeerId]: return PEER_ID_POOL.copy() - def get_random_peer_id_from_pool(self, pool: Optional[List[PeerId]] = None, + def get_random_peer_id_from_pool(self, pool: Optional[list[PeerId]] = None, rng: Optional[Random] = None) -> PeerId: if pool is None: pool = self._free_peer_id_pool @@ -295,7 +295,7 @@ def assertConsensusEqual(self, manager1, manager2): tx2 = manager2.tx_storage.get_transaction(tx1.hash) tx1_meta = tx1.get_metadata() tx2_meta = tx2.get_metadata() - # conflict_with's type is Optional[List[bytes]], so we convert to a set because order does not matter. + # conflict_with's type is Optional[list[bytes]], so we convert to a set because order does not matter. 
self.assertEqual(set(tx1_meta.conflict_with or []), set(tx2_meta.conflict_with or [])) # Soft verification if tx1_meta.voided_by is None: diff --git a/tests/utils.py b/tests/utils.py index c9326efb5..9d5e16f77 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -6,7 +6,7 @@ import time import urllib.parse from dataclasses import dataclass -from typing import Iterator, List, Optional, Tuple, TypeVar, cast +from typing import Iterator, Optional, TypeVar, cast import requests from hathorlib.scripts import DataScript @@ -55,7 +55,7 @@ def resolve_block_bytes(block_bytes): return block.get_struct() -def add_custom_tx(manager: HathorManager, tx_inputs: List[Tuple[BaseTransaction, int]], *, n_outputs: int = 1, +def add_custom_tx(manager: HathorManager, tx_inputs: list[tuple[BaseTransaction, int]], *, n_outputs: int = 1, base_parent: Optional[Transaction] = None, weight: Optional[float] = None, resolve: bool = False, address: Optional[str] = None, inc_timestamp: int = 0) -> Transaction: """Add a custom tx based on the gen_custom_tx(...) method.""" @@ -65,7 +65,7 @@ def add_custom_tx(manager: HathorManager, tx_inputs: List[Tuple[BaseTransaction, return tx -def gen_custom_tx(manager: HathorManager, tx_inputs: List[Tuple[BaseTransaction, int]], *, n_outputs: int = 1, +def gen_custom_tx(manager: HathorManager, tx_inputs: list[tuple[BaseTransaction, int]], *, n_outputs: int = 1, base_parent: Optional[Transaction] = None, weight: Optional[float] = None, resolve: bool = False, address: Optional[str] = None, inc_timestamp: int = 0) -> Transaction: """Generate a custom tx based on the inputs and outputs. 
It gives full control to the @@ -241,7 +241,7 @@ def add_new_transactions(manager, num_txs, advance_clock=None, propagate=True): :type num_txs: int :return: Transactions created - :rtype: List[Transaction] + :rtype: list[Transaction] """ txs = [] for _ in range(num_txs): @@ -285,7 +285,7 @@ def add_new_blocks(manager, num_blocks, advance_clock=None, *, parent_block_hash :type num_blocks: int :return: Blocks created - :rtype: List[Block] + :rtype: list[Block] """ blocks = [] for _ in range(num_blocks): @@ -385,10 +385,10 @@ def request_server(path, method, host='http://localhost', port=8085, data=None, :type port: int :param data: Request data - :type data: Dict + :type data: dict :return: Response in json format - :rtype: Dict (json) + :rtype: dict (json) """ partial_url = '{}:{}/{}/'.format(host, port, prefix) url = urllib.parse.urljoin(partial_url, path) @@ -492,7 +492,7 @@ def create_tokens(manager: 'HathorManager', address_b58: Optional[str] = None, m genesis_private_key = get_genesis_key() change_output: Optional[TxOutput] - parents: List[bytes] + parents: list[bytes] if use_genesis: genesis_hash = genesis_block.hash assert genesis_hash is not None @@ -576,7 +576,7 @@ def create_script_with_sigops(nops: int) -> bytes: return hscript.data -def add_tx_with_data_script(manager: 'HathorManager', data: List[str], propagate: bool = True) -> Transaction: +def add_tx_with_data_script(manager: 'HathorManager', data: list[str], propagate: bool = True) -> Transaction: """ This method will create and propagate a transaction with only data script outputs """ wallet = manager.wallet @@ -722,7 +722,7 @@ def create_event(cls, event_id: int) -> BaseEvent: T = TypeVar('T') -def zip_chunkify(flat_list: List[T], chunked_list: List[List[T]]) -> Iterator[Tuple[List[T], List[T]]]: +def zip_chunkify(flat_list: list[T], chunked_list: list[list[T]]) -> Iterator[tuple[list[T], list[T]]]: """ Takes two lists, one flat and one chunked. 
Chunks the first one into chunks of the same size as the second. Returns a zipped list where each item is a tuple of chunks, one from each list.