diff --git a/extras/custom_checks.sh b/extras/custom_checks.sh index ece887832..7fcf2e796 100644 --- a/extras/custom_checks.sh +++ b/extras/custom_checks.sh @@ -58,6 +58,7 @@ function check_do_not_use_builtin_random_in_tests() { hathor/merged_mining/debug_api.py hathor/client.py hathor/cli/tx_generator.py + tests/test_utils/test_leb128.py ) exclude_params=() for item in "${exclude[@]}"; do @@ -81,9 +82,10 @@ function check_deprecated_typing() { } function check_do_not_import_tests_in_hathor() { - if grep -R '\<.*import .*tests.*\>\|\<.*from .*tests.* import\>' "hathor"; then + if grep -R '\<.*import .*tests.*\>\|\<.*from .*tests.* import\>' "hathor" | grep -v '# skip-import-tests-custom-check'; then echo 'do not import test definitions in the hathor module' echo 'move them from tests to hathor instead' + echo 'alternatively, comment `# skip-import-tests-custom-check` to exclude a line.' return 1 fi return 0 diff --git a/hathor/builder/builder.py b/hathor/builder/builder.py index 753337568..3c03091a3 100644 --- a/hathor/builder/builder.py +++ b/hathor/builder/builder.py @@ -34,6 +34,10 @@ from hathor.indexes import IndexesManager, RocksDBIndexesManager from hathor.manager import HathorManager from hathor.mining.cpu_mining_service import CpuMiningService +from hathor.nanocontracts import NCRocksDBStorageFactory, NCStorageFactory +from hathor.nanocontracts.catalog import NCBlueprintCatalog +from hathor.nanocontracts.nc_exec_logs import NCLogConfig, NCLogStorage +from hathor.nanocontracts.sorter.types import NCSorterCallable from hathor.p2p.manager import ConnectionsManager from hathor.p2p.peer import PrivatePeer from hathor.pubsub import PubSubManager @@ -164,6 +168,7 @@ def __init__(self) -> None: self._enable_address_index: bool = False self._enable_tokens_index: bool = False self._enable_utxo_index: bool = False + self._enable_nc_indices: bool = False self._sync_v2_support: SyncSupportLevel = SyncSupportLevel.ENABLED @@ -182,6 +187,12 @@ def __init__(self) -> None: 
self._enable_ipv6: bool = False self._disable_ipv4: bool = False + self._nc_anti_mev: bool = False + + self._nc_storage_factory: NCStorageFactory | None = None + self._nc_log_storage: NCLogStorage | None = None + self._nc_log_config: NCLogConfig = NCLogConfig.NONE + def build(self) -> BuildArtifacts: if self.artifacts is not None: raise ValueError('cannot call build twice') @@ -214,6 +225,9 @@ def build(self) -> BuildArtifacts: vertex_parser = self._get_or_create_vertex_parser() poa_block_producer = self._get_or_create_poa_block_producer() + if settings.ENABLE_NANO_CONTRACTS: + tx_storage.nc_catalog = self._get_nc_catalog() + if self._enable_address_index: indexes.enable_address_index(pubsub) @@ -223,6 +237,9 @@ def build(self) -> BuildArtifacts: if self._enable_utxo_index: indexes.enable_utxo_index() + if self._enable_nc_indices: + indexes.enable_nc_indices() + kwargs: dict[str, Any] = {} if self._enable_event_queue is not None: @@ -276,7 +293,7 @@ def build(self) -> BuildArtifacts: rocksdb_storage=rocksdb_storage, stratum_factory=stratum_factory, feature_service=feature_service, - bit_signaling_service=bit_signaling_service + bit_signaling_service=bit_signaling_service, ) return self.artifacts @@ -351,6 +368,34 @@ def _get_or_create_execution_manager(self) -> ExecutionManager: return self._execution_manager + def _get_or_create_nc_storage_factory(self) -> NCStorageFactory: + if self._nc_storage_factory is not None: + return self._nc_storage_factory + + rocksdb_storage = self._get_or_create_rocksdb_storage() + self._nc_storage_factory = NCRocksDBStorageFactory(rocksdb_storage) + return self._nc_storage_factory + + def _get_nc_calls_sorter(self) -> NCSorterCallable: + if self._nc_anti_mev: + from hathor.nanocontracts.sorter.random_sorter import random_nc_calls_sorter + return random_nc_calls_sorter + else: + from hathor.nanocontracts.sorter.timestamp_sorter import timestamp_nc_calls_sorter + return timestamp_nc_calls_sorter + + def 
_get_or_create_nc_log_storage(self) -> NCLogStorage: + if self._nc_log_storage is not None: + return self._nc_log_storage + + rocksdb_storage = self._get_or_create_rocksdb_storage() + self._nc_log_storage = NCLogStorage( + settings=self._get_or_create_settings(), + path=rocksdb_storage.path, + config=self._nc_log_config, + ) + return self._nc_log_storage + def _get_or_create_consensus(self) -> ConsensusAlgorithm: if self._consensus is None: soft_voided_tx_ids = self._get_soft_voided_tx_ids() @@ -359,6 +404,11 @@ def _get_or_create_consensus(self) -> ConsensusAlgorithm: return self._consensus + def _get_nc_catalog(self) -> NCBlueprintCatalog: + from hathor.nanocontracts.catalog import generate_catalog_from_settings + settings = self._get_or_create_settings() + return generate_catalog_from_settings(settings) + def _get_or_create_pubsub(self) -> PubSubManager: if self._pubsub is None: self._pubsub = PubSubManager(self._get_reactor()) @@ -429,12 +479,14 @@ def _get_or_create_tx_storage(self) -> TransactionStorage: store_indexes = None rocksdb_storage = self._get_or_create_rocksdb_storage() + nc_storage_factory = self._get_or_create_nc_storage_factory() vertex_parser = self._get_or_create_vertex_parser() self._tx_storage = TransactionRocksDBStorage( rocksdb_storage, indexes=store_indexes, settings=settings, vertex_parser=vertex_parser, + nc_storage_factory=nc_storage_factory, ) if self._tx_storage_cache: @@ -443,7 +495,12 @@ def _get_or_create_tx_storage(self) -> TransactionStorage: if self._tx_storage_cache_capacity is not None: kwargs['capacity'] = self._tx_storage_cache_capacity self._tx_storage = TransactionCacheStorage( - self._tx_storage, reactor, indexes=indexes, settings=settings, **kwargs + self._tx_storage, + reactor, + indexes=indexes, + settings=settings, + nc_storage_factory=nc_storage_factory, + **kwargs ) return self._tx_storage @@ -658,6 +715,11 @@ def enable_utxo_index(self) -> 'Builder': self._enable_utxo_index = True return self + def 
enable_nc_indices(self) -> 'Builder': + self.check_if_can_modify() + self._enable_nc_indices = True + return self + def enable_wallet_index(self) -> 'Builder': if self._tx_storage or self._indexes_manager: raise ValueError('cannot enable index after tx storage or indexes manager is set') @@ -744,6 +806,16 @@ def disable_ipv4(self) -> 'Builder': self._disable_ipv4 = True return self + def enable_nc_anti_mev(self) -> 'Builder': + self.check_if_can_modify() + self._nc_anti_mev = True + return self + + def disable_nc_anti_mev(self) -> 'Builder': + self.check_if_can_modify() + self._nc_anti_mev = False + return self + def set_soft_voided_tx_ids(self, soft_voided_tx_ids: set[bytes]) -> 'Builder': self.check_if_can_modify() self._soft_voided_tx_ids = soft_voided_tx_ids @@ -769,3 +841,8 @@ def set_poa_signer(self, signer: PoaSigner) -> 'Builder': self.check_if_can_modify() self._poa_signer = signer return self + + def set_nc_log_config(self, config: NCLogConfig) -> 'Builder': + self.check_if_can_modify() + self._nc_log_config = config + return self diff --git a/hathor/builder/cli_builder.py b/hathor/builder/cli_builder.py index e946023b2..212f6c7c3 100644 --- a/hathor/builder/cli_builder.py +++ b/hathor/builder/cli_builder.py @@ -78,6 +78,7 @@ def create_manager(self, reactor: Reactor) -> HathorManager: from hathor.daa import TestMode from hathor.event.storage import EventRocksDBStorage, EventStorage from hathor.event.websocket.factory import EventWebsocketFactory + from hathor.nanocontracts import NCRocksDBStorageFactory, NCStorageFactory from hathor.p2p.netfilter.utils import add_peer_id_blacklist from hathor.p2p.peer_discovery import BootstrapPeerDiscovery, DNSPeerDiscovery from hathor.storage import RocksDBStorage @@ -134,6 +135,8 @@ def create_manager(self, reactor: Reactor) -> HathorManager: if self._args.data else RocksDBStorage.create_temp(cache_capacity) ) + self.nc_storage_factory: NCStorageFactory = NCRocksDBStorageFactory(self.rocksdb_storage) + # Initialize 
indexes manager. indexes = RocksDBIndexesManager(self.rocksdb_storage, settings=settings) @@ -143,7 +146,11 @@ def create_manager(self, reactor: Reactor) -> HathorManager: # only TransactionCacheStorage should have indexes. kwargs['indexes'] = indexes tx_storage = TransactionRocksDBStorage( - self.rocksdb_storage, settings=settings, vertex_parser=vertex_parser, **kwargs + self.rocksdb_storage, + settings=settings, + vertex_parser=vertex_parser, + nc_storage_factory=self.nc_storage_factory, + **kwargs ) event_storage = EventRocksDBStorage(self.rocksdb_storage) feature_storage = FeatureActivationStorage(settings=settings, rocksdb_storage=self.rocksdb_storage) @@ -158,7 +165,13 @@ def create_manager(self, reactor: Reactor) -> HathorManager: self.check_or_raise(self._args.cache_interval is None, 'cannot use --disable-cache with --cache-interval') if not self._args.disable_cache: - tx_storage = TransactionCacheStorage(tx_storage, reactor, indexes=indexes, settings=settings) + tx_storage = TransactionCacheStorage( + tx_storage, + reactor, + indexes=indexes, + settings=settings, + nc_storage_factory=self.nc_storage_factory, + ) tx_storage.capacity = self._args.cache_size if self._args.cache_size is not None else DEFAULT_CACHE_SIZE if self._args.cache_interval: tx_storage.interval = self._args.cache_interval @@ -167,6 +180,10 @@ def create_manager(self, reactor: Reactor) -> HathorManager: self.tx_storage = tx_storage self.log.info('with indexes', indexes_class=type(tx_storage.indexes).__name__) + if settings.ENABLE_NANO_CONTRACTS: + from hathor.nanocontracts.catalog import generate_catalog_from_settings + self.tx_storage.nc_catalog = generate_catalog_from_settings(settings) + self.wallet = None if self._args.wallet: self.wallet = self.create_wallet() @@ -213,6 +230,16 @@ def create_manager(self, reactor: Reactor) -> HathorManager: self.log.debug('enable utxo index') tx_storage.indexes.enable_utxo_index() + self.check_or_raise( + not self._args.nc_history_index, + 
'--nc-history-index has been deprecated, use --nc-indices instead', + ) + if self._args.nc_indices and tx_storage.indexes is not None: + self.log.debug('enable nano indices') + tx_storage.indexes.enable_nc_indices() + + assert self.nc_storage_factory is not None + soft_voided_tx_ids = set(settings.SOFT_VOIDED_TX_IDS) consensus_algorithm = ConsensusAlgorithm( soft_voided_tx_ids, diff --git a/hathor/cli/run_node.py b/hathor/cli/run_node.py index 07bc93b25..58060a51f 100644 --- a/hathor/cli/run_node.py +++ b/hathor/cli/run_node.py @@ -69,6 +69,7 @@ def create_parser(cls) -> ArgumentParser: """ from hathor.cli.util import create_parser from hathor.feature_activation.feature import Feature + from hathor.nanocontracts.nc_exec_logs import NCLogConfig parser = create_parser(prefix=cls.env_vars_prefix) parser.add_argument('--hostname', help='Hostname used to be accessed by other peers') @@ -115,6 +116,9 @@ def create_parser(cls) -> ArgumentParser: help='Create an index of transactions by address and allow searching queries') parser.add_argument('--utxo-index', action='store_true', help='Create an index of UTXOs by token/address/amount and allow searching queries') + parser.add_argument('--nc-history-index', action='store_true', help=SUPPRESS) # moved to --nc-indices + parser.add_argument('--nc-indices', action='store_true', + help='Enable indices related to nano contracts') parser.add_argument('--prometheus', action='store_true', help='Send metric data to Prometheus') parser.add_argument('--prometheus-prefix', default='', help='A prefix that will be added in all Prometheus metrics') @@ -165,6 +169,9 @@ def create_parser(cls) -> ArgumentParser: help='Enables listening on IPv6 interface and connecting to IPv6 peers') parser.add_argument('--x-disable-ipv4', action='store_true', help='Disables connecting to IPv4 peers') + possible_nc_exec_logs = [config.value for config in NCLogConfig] + parser.add_argument('--nc-exec-logs', default=NCLogConfig.NONE, 
choices=possible_nc_exec_logs, + help=f'Enable saving Nano Contracts execution logs. One of {possible_nc_exec_logs}') return parser def prepare(self, *, register_resources: bool = True) -> None: diff --git a/hathor/cli/run_node_args.py b/hathor/cli/run_node_args.py index cdebb7830..238823c44 100644 --- a/hathor/cli/run_node_args.py +++ b/hathor/cli/run_node_args.py @@ -17,6 +17,7 @@ from pydantic import Extra from hathor.feature_activation.feature import Feature # skip-cli-import-custom-check +from hathor.nanocontracts.nc_exec_logs import NCLogConfig # skip-cli-import-custom-check from hathor.utils.pydantic import BaseModel # skip-cli-import-custom-check @@ -87,3 +88,6 @@ class RunNodeArgs(BaseModel, extra=Extra.allow): x_enable_ipv6: bool x_disable_ipv4: bool localnet: bool + nc_history_index: bool + nc_indices: bool + nc_exec_logs: NCLogConfig diff --git a/hathor/conf/nano_testnet.py b/hathor/conf/nano_testnet.py index 32f7ab7c9..d615947a8 100644 --- a/hathor/conf/nano_testnet.py +++ b/hathor/conf/nano_testnet.py @@ -18,7 +18,7 @@ P2PKH_VERSION_BYTE=b'\x49', MULTISIG_VERSION_BYTE=b'\x87', NETWORK_NAME='nano-testnet-alpha', - BOOTSTRAP_DNS=[], + BOOTSTRAP_DNS=['alpha.nano-testnet.hathor.network'], # Genesis stuff GENESIS_OUTPUT_SCRIPT=bytes.fromhex('76a91478e804bf8aa68332c6c1ada274ac598178b972bf88ac'), GENESIS_BLOCK_TIMESTAMP=1677601898, @@ -34,5 +34,14 @@ MIN_TX_WEIGHT=8, CHECKPOINTS=[], ENABLE_NANO_CONTRACTS=True, - BLUEPRINTS={}, + ENABLE_ON_CHAIN_BLUEPRINTS=True, + NC_ON_CHAIN_BLUEPRINT_ALLOWED_ADDRESSES=[ + 'WWFiNeWAFSmgtjm4ht2MydwS5GY3kMJsEK', + ], + BLUEPRINTS={ + bytes.fromhex('3cb032600bdf7db784800e4ea911b10676fa2f67591f82bb62628c234e771595'): 'Bet', + }, + SOFT_VOIDED_TX_IDS=list(map(bytes.fromhex, [ + '0000003dd5802b05f430a1f54304879173550c0944b49d74321bb9125ee727cb', + ])), ) diff --git a/hathor/conf/nano_testnet.yml b/hathor/conf/nano_testnet.yml index ece2b1b87..cb022710a 100644 --- a/hathor/conf/nano_testnet.yml +++ b/hathor/conf/nano_testnet.yml @@ 
-1,7 +1,8 @@ P2PKH_VERSION_BYTE: x49 MULTISIG_VERSION_BYTE: x87 NETWORK_NAME: nano-testnet-alpha -BOOTSTRAP_DNS: [] +BOOTSTRAP_DNS: + - alpha.nano-testnet.hathor.network # Genesis stuff GENESIS_OUTPUT_SCRIPT: 76a91478e804bf8aa68332c6c1ada274ac598178b972bf88ac @@ -18,3 +19,11 @@ MIN_TX_WEIGHT_K: 0 MIN_TX_WEIGHT_COEFFICIENT: 0 MIN_TX_WEIGHT: 8 ENABLE_NANO_CONTRACTS: true +ENABLE_ON_CHAIN_BLUEPRINTS: true +NC_ON_CHAIN_BLUEPRINT_ALLOWED_ADDRESSES: + - WWFiNeWAFSmgtjm4ht2MydwS5GY3kMJsEK +BLUEPRINTS: + 3cb032600bdf7db784800e4ea911b10676fa2f67591f82bb62628c234e771595: Bet + +SOFT_VOIDED_TX_IDS: + - 0000003dd5802b05f430a1f54304879173550c0944b49d74321bb9125ee727cb diff --git a/hathor/conf/unittests.py b/hathor/conf/unittests.py index afd06e266..fe809e332 100644 --- a/hathor/conf/unittests.py +++ b/hathor/conf/unittests.py @@ -41,4 +41,8 @@ default_threshold=3 ), ENABLE_NANO_CONTRACTS=True, + ENABLE_ON_CHAIN_BLUEPRINTS=True, + NC_ON_CHAIN_BLUEPRINT_ALLOWED_ADDRESSES=[ + 'HFwHrQHUftQ7obLj7xbQjG4ZEwvyVXeyoE', + ], ) diff --git a/hathor/conf/unittests.yml b/hathor/conf/unittests.yml index fdcc5e261..ee5407415 100644 --- a/hathor/conf/unittests.yml +++ b/hathor/conf/unittests.yml @@ -17,9 +17,23 @@ GENESIS_TX2_HASH: 33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e8 REWARD_SPEND_MIN_BLOCKS: 10 SLOW_ASSERTS: true MAX_TX_WEIGHT_DIFF_ACTIVATION: 0.0 -ENABLE_NANO_CONTRACTS: true FEATURE_ACTIVATION: evaluation_interval: 4 max_signal_bits: 4 default_threshold: 3 + +ENABLE_NANO_CONTRACTS: true +ENABLE_ON_CHAIN_BLUEPRINTS: true + +NC_ON_CHAIN_BLUEPRINT_ALLOWED_ADDRESSES: + # keypair wallet: + # - privkey: + # MIH0MF8GCSqGSIb3DQEFDTBSMDEGCSqGSIb3DQEFDDAkBBCIdovnmKjK3KUc61YGgja0AgIIAD + # AMBggqhkiG9w0CCQUAMB0GCWCGSAFlAwQBKgQQl2CJT4I2IUzRNoU9hyOWEwSBkLznN9Nunel+ + # kK0FXpk//z0ZAnIyVacfHklCxFGyOj1VSjor0CHzH2Gmblvr+m7lCmRmqSVAwJpplqQYdBUF6s + # R9djHLY6svPY0o//dqQ/xM7QiY2FHlb3JQCTu7DaMflqPcJXlRXAFyoACnmj4/lUJWgrcWalar + # CSI+8rIillg3AU8/2gfoB1BxulVIIG35SQ== + # - password: + # 
OCBtestPW + - HFwHrQHUftQ7obLj7xbQjG4ZEwvyVXeyoE diff --git a/hathor/dag_builder/builder.py b/hathor/dag_builder/builder.py index e28a6fdfd..2d63f5959 100644 --- a/hathor/dag_builder/builder.py +++ b/hathor/dag_builder/builder.py @@ -14,7 +14,9 @@ from __future__ import annotations +import ast from collections import defaultdict +from types import ModuleType from typing import Iterator from structlog import get_logger @@ -33,10 +35,17 @@ VertexResolverType, WalletFactoryType, ) +from hathor.dag_builder.utils import is_literal, parse_amount_token +from hathor.manager import HathorManager +from hathor.nanocontracts.catalog import NCBlueprintCatalog +from hathor.util import initialize_hd_wallet from hathor.wallet import BaseWallet logger = get_logger() +NC_DEPOSIT_KEY = 'nc_deposit' +NC_WITHDRAWAL_KEY = 'nc_withdrawal' + class DAGBuilder: def __init__( @@ -46,6 +55,8 @@ def __init__( genesis_wallet: BaseWallet, wallet_factory: WalletFactoryType, vertex_resolver: VertexResolverType, + nc_catalog: NCBlueprintCatalog, + blueprints_module: ModuleType | None = None, ) -> None: from hathor.dag_builder.default_filler import DefaultFiller from hathor.dag_builder.tokenizer import tokenize @@ -63,6 +74,27 @@ def __init__( genesis_wallet=genesis_wallet, wallet_factory=wallet_factory, vertex_resolver=vertex_resolver, + nc_catalog=nc_catalog, + blueprints_module=blueprints_module, + ) + + @staticmethod + def from_manager( + manager: HathorManager, + genesis_words: str, + wallet_factory: WalletFactoryType, + blueprints_module: ModuleType | None = None + ) -> DAGBuilder: + """Create a DAGBuilder instance from a HathorManager instance.""" + assert manager.tx_storage.nc_catalog + return DAGBuilder( + settings=manager._settings, + daa=manager.daa, + genesis_wallet=initialize_hd_wallet(genesis_words), + wallet_factory=wallet_factory, + vertex_resolver=lambda x: manager.cpu_mining_service.resolve(x), + nc_catalog=manager.tx_storage.nc_catalog, + blueprints_module=blueprints_module, ) def 
parse_tokens(self, tokens: Iterator[Token]) -> None: @@ -115,6 +147,22 @@ def add_deps(self, _from: str, _to: str) -> Self: from_node.deps.add(_to) return self + def set_balance(self, name: str, token: str, value: int) -> Self: + """Set the expected balance for a given token, where balance = sum(outputs) - sum(inputs). + + =0 means sum(txouts) = sum(txins) + >0 means sum(txouts) > sum(txins), e.g., withdrawal + <0 means sum(txouts) < sum(txins), e.g., deposit + """ + node = self._get_or_create_node(name) + if token in node.balances: + raise SyntaxError(f'{name}: balance set more than once for {token}') + node.balances[token] = value + if token != 'HTR': + self._get_or_create_node(token, default_type=DAGNodeType.Token) + self.add_deps(name, token) + return self + def add_blockchain(self, prefix: str, first_parent: str | None, first_index: int, last_index: int) -> Self: """Add a sequence of nodes representing a chain of blocks.""" prev = first_parent @@ -127,7 +175,7 @@ def add_blockchain(self, prefix: str, first_parent: str | None, first_index: int return self def add_parent_edge(self, _from: str, _to: str) -> Self: - """Add a parent edge between two nodes. For clarity, `_to` has to be created befre `_from`.""" + """Add a parent edge between two nodes. 
For clarity, `_to` has to be created before `_from`.""" self._get_or_create_node(_to) from_node = self._get_or_create_node(_from) from_node.parents.add(_to) @@ -154,13 +202,85 @@ def set_output(self, name: str, index: int, amount: int, token: str, attrs: Attr node.deps.add(token) return self + def _parse_expression(self, value: str) -> ast.AST: + try: + ret = ast.parse(value, mode='eval').body + except SyntaxError as e: + raise SyntaxError(f'failed parsing "{value}"') from e + return ret + + def _add_nc_attribute(self, name: str, key: str, value: str) -> None: + """Handle attributes related to nanocontract transactions.""" + node = self._get_or_create_node(name) + if key == 'nc_id': + parsed_value = self._parse_expression(value) + if isinstance(parsed_value, ast.Name): + node.deps.add(parsed_value.id) + elif isinstance(parsed_value, ast.Call): + for arg in parsed_value.args: + if isinstance(arg, ast.Name): + node.deps.add(arg.id) + elif isinstance(arg, ast.Attribute): + assert isinstance(arg.value, ast.Name) + node.deps.add(arg.value.id) + node.attrs[key] = parsed_value + + elif key in (NC_DEPOSIT_KEY, NC_WITHDRAWAL_KEY): + token, amount, args = parse_amount_token(value) + if args: + raise SyntaxError(f'unexpected args in `{value}`') + if amount < 0: + raise SyntaxError(f'unexpected negative action in `{value}`') + multiplier = 1 if key == NC_WITHDRAWAL_KEY else -1 + self.set_balance(name, token, amount * multiplier) + actions = node.get_attr_list(key, default=[]) + actions.append((token, amount)) + node.attrs[key] = actions + + else: + node.attrs[key] = value + + def _add_ocb_attribute(self, name: str, key: str, value: str) -> None: + """Handle attributes related to on-chain blueprint transactions.""" + node = self._get_or_create_node(name) + node.type = DAGNodeType.OnChainBlueprint + if key == 'ocb_code': + node.attrs[key] = value + + elif key == 'ocb_private_key': + if not is_literal(value): + raise SyntaxError(f'ocb_private_key must be a bytes literal: 
{value}') + node.attrs[key] = value + + elif key == 'ocb_password': + if not is_literal(value): + raise SyntaxError(f'ocb_password must be a bytes literal: {value}') + node.attrs[key] = value + + else: + node.attrs[key] = value + def add_attribute(self, name: str, key: str, value: str) -> Self: """Add an attribute to a node.""" + if key.startswith('nc_'): + self._add_nc_attribute(name, key, value) + return self + + if key.startswith('ocb_'): + self._add_ocb_attribute(name, key, value) + return self + + if key.startswith('balance_'): + token = key[len('balance_'):] + self.set_balance(name, token, int(value)) + return self + node = self._get_or_create_node(name) - if key == 'type': - node.type = DAGNodeType(value) - else: + if key not in node.attrs: node.attrs[key] = value + else: + raise SyntaxError('attribute key duplicated') + return self def topological_sorting(self) -> Iterator[DAGNode]: @@ -181,12 +301,14 @@ def topological_sorting(self) -> Iterator[DAGNode]: for _ in range(len(self._nodes)): if len(candidates) == 0: - self.log('fail because there is at least one cycle in the dependencies', - direct_deps=direct_deps, - rev_deps=rev_deps, - seen=seen, - not_seen=set(self._nodes.keys()) - seen, - nodes=self._nodes) + self.log.error( + 'fail because there is at least one cycle in the dependencies', + direct_deps=direct_deps, + rev_deps=rev_deps, + seen=seen, + not_seen=set(self._nodes.keys()) - seen, + nodes=self._nodes, + ) raise RuntimeError('there is at least one cycle') name = candidates.pop() assert name not in seen diff --git a/hathor/dag_builder/cli.py b/hathor/dag_builder/cli.py index ff6184fb4..d8afd0fef 100644 --- a/hathor/dag_builder/cli.py +++ b/hathor/dag_builder/cli.py @@ -23,6 +23,7 @@ def main(filename: str, genesis_seed: str) -> None: from hathor.conf.get_settings import get_global_settings from hathor.daa import DifficultyAdjustmentAlgorithm + from hathor.nanocontracts.catalog import generate_catalog_from_settings from hathor.wallet import 
HDWallet settings = get_global_settings() @@ -36,6 +37,7 @@ def wallet_factory(words=None): genesis_wallet = wallet_factory(genesis_seed) daa = DifficultyAdjustmentAlgorithm(settings=settings) + nc_catalog = generate_catalog_from_settings(settings) builder = DAGBuilder( settings=settings, @@ -43,6 +45,7 @@ def wallet_factory(words=None): genesis_wallet=genesis_wallet, wallet_factory=wallet_factory, vertex_resolver=lambda x: None, + nc_catalog=nc_catalog, ) fp = open(filename, 'r') diff --git a/hathor/dag_builder/default_filler.py b/hathor/dag_builder/default_filler.py index 95026e2cc..9970b1bd1 100644 --- a/hathor/dag_builder/default_filler.py +++ b/hathor/dag_builder/default_filler.py @@ -15,11 +15,11 @@ from __future__ import annotations from collections import defaultdict -from math import ceil from hathor.conf.settings import HathorSettings from hathor.daa import DifficultyAdjustmentAlgorithm from hathor.dag_builder.builder import DAGBuilder, DAGInput, DAGNode, DAGNodeType, DAGOutput +from hathor.transaction.util import get_deposit_amount class DefaultFiller: @@ -64,7 +64,7 @@ def get_next_index(outputs: list[DAGOutput | None]) -> int: outputs.append(None) return len(outputs) - 1 - def fill_parents(self, node: DAGNode, *, target: int = 2, candidates: list[str] | None = []) -> None: + def fill_parents(self, node: DAGNode, *, target: int = 2, candidates: list[str] | None = None) -> None: """Fill parents of a vertex. Note: We shouldn't use the DAG transactions because it would confirm them, violating the DAG description.""" @@ -104,7 +104,10 @@ def find_txin(self, amount: int, token: str) -> DAGInput: return DAGInput(token, index) def calculate_balance(self, node: DAGNode) -> dict[str, int]: - """Calculate the balance for each token in a node.""" + """Calculate the balance for each token in a node. 
+ + balance = sum(outputs) - sum(inputs) + """ ins: defaultdict[str, int] = defaultdict(int) for tx_name, index in node.inputs: node2 = self._get_or_create_node(tx_name) @@ -117,7 +120,7 @@ def calculate_balance(self, node: DAGNode) -> dict[str, int]: assert txout is not None outs[txout.token] += txout.amount - keys = set(ins.keys()) | set(outs.keys()) + keys = set(ins.keys()) | set(outs.keys()) | set(node.balances.keys()) balance = {} for key in keys: balance[key] = outs.get(key, 0) - ins.get(key, 0) @@ -129,9 +132,8 @@ def balance_node_inputs_and_outputs(self, node: DAGNode) -> None: balance = self.calculate_balance(node) for key, diff in balance.items(): - # =0 balance - # <0 need output - # >0 need input + target = node.balances.get(key, 0) + diff -= target if diff < 0: index = self.get_next_index(node.outputs) node.outputs[index] = DAGOutput(abs(diff), key, {'_origin': 'f3'}) @@ -221,6 +223,10 @@ def run(self) -> None: self.fill_parents(node) self.balance_node_inputs_and_outputs(node) + case DAGNodeType.OnChainBlueprint: + self.fill_parents(node) + self.balance_node_inputs_and_outputs(node) + case DAGNodeType.Token: tokens.append(node.name) self.fill_parents(node) @@ -234,15 +240,22 @@ def run(self) -> None: balance = self.calculate_balance(node) assert set(balance.keys()).issubset({'HTR', token}) - htr_minimum = ceil(balance[token] / 100) - htr_balance = -balance.get('HTR', 0) + htr_deposit = get_deposit_amount(self._settings, balance[token]) + htr_balance = balance.get('HTR', 0) - if htr_balance > htr_minimum: + # target = sum(outputs) - sum(inputs) + # <0 means deposit + # >0 means withdrawal + htr_target = node.balances.get('HTR', 0) - htr_deposit + + diff = htr_balance - htr_target + + if diff < 0: index = self.get_next_index(node.outputs) - node.outputs[index] = DAGOutput(htr_balance - htr_minimum, 'HTR', {'_origin': 'f8'}) + node.outputs[index] = DAGOutput(-diff, 'HTR', {'_origin': 'f8'}) - elif htr_balance < htr_minimum: - txin = 
self.find_txin(htr_minimum - htr_balance, 'HTR') + elif diff > 0: + txin = self.find_txin(diff, 'HTR') node.inputs.add(txin) if 'dummy' in self._builder._nodes: diff --git a/hathor/dag_builder/tokenizer.py b/hathor/dag_builder/tokenizer.py index 43de684a1..19dbbed55 100644 --- a/hathor/dag_builder/tokenizer.py +++ b/hathor/dag_builder/tokenizer.py @@ -14,6 +14,7 @@ import re from enum import Enum, auto +from textwrap import dedent from typing import Any, Iterator """ @@ -31,6 +32,11 @@ a.attr1 = value # set value of attribute attr to a a.attr2 = "value" # a string literal + a.attr3 = ``` # a multiline string literal. + if foo: # parsing is limited — there's no support for comments nor escaping characters. + bar # both start and end delimiters must be in their own line. + ``` + Special keywords: b10 < dummy # `dummy` is a tx created automatically that spends genesis tokens and provides @@ -43,6 +49,34 @@ a.out[i] = 100 TOKEN # set that the i-th output of a holds 100 TOKEN where TOKEN is a custom token a.weight = 50 # set vertex weight +Nano Contracts: + + tx1.nc_id = "{'ff' * 32}" # create a Nano Contract with some custom nc_id + tx1.nc_id = tx2 # create a Nano Contract with another tx's id as its nc_id + tx1.nc_deposit = 10 HTR # perform a deposit in a Nano Contract + tx1.nc_withdrawal = 10 HTR # perform a withdrawal in a Nano Contract + tx1.nc_method = initialize("00") # call a Nano Contract method + tx2.nc_method = initialize(`tx1`) # call a Nano Contract method with another tx's id as an argument + tx2.nc_seqnum = 5 + + # Points to a contract created by another contract. + tx1.nc_id = child_contract(contract_creator_id, salt.hex(), blueprint_id.hex()) + +On-chain Blueprints: + + ocb1.ocb_private_key = "{private_key}" # private key bytes in hex to sign the OCB + ocb1.ocb_password = "{password}" # password bytes in hex to sign the OCB + + ocb.ocb_code = "{ocb_code_bytes}" # create an on-chain Blueprint with some custom code. 
+ # the literal should be the hex value of uncompressed code bytes. + + ocb.ocb_code = ``` + class MyBlueprint(Blueprint): # multiline strings can also be used to directly inline custom code. + pass # given its limitations (described above), for complex code it is + ``` # recommended to use separate files (see below). + + ocb.ocb_code = my_blueprint.py, MyTest # set a filename and a class name to create an OCB using code from a file. + # configure the root directory when instantiating the DagBuilder. Example: @@ -80,8 +114,22 @@ b5 < c0 < c10 < b20 b6 < tx3 b16 < tx4 + + # Nano Contracts and on-chain Blueprints + ocb1.ocb_private_key = "{unittest.OCB_TEST_PRIVKEY.hex()}" + ocb1.ocb_password = "{unittest.OCB_TEST_PASSWORD.hex()}" + ocb1.ocb_code = "{load_blueprint_code('bet.py', 'Bet').encode().hex()}" + + nc1.nc_id = ocb1 + nc1.nc_method = initialize("00", "00", 0) + + ocb1 <-- b300 + b300 < nc1 + """ +MULTILINE_DELIMITER = '```' + class TokenType(Enum): BLOCKCHAIN = auto() @@ -118,8 +166,29 @@ def tokenize(content: str) -> Iterator[Token]: """ blockchain_re = re.compile(r'^([a-zA-Z][a-zA-Z0-9-_]*)\[([0-9]+)..([0-9]+)\]$') first_parent: str | None + + # A `(name, key, lines)` tuple where `lines` contains the multiline string as it accumulates line by line. 
+ multiline_accumulator: tuple[str, str, list[str]] | None = None + for line in content.split('\n'): line, _, _ = line.partition('#') + + if multiline_accumulator is not None: + if MULTILINE_DELIMITER not in line: + _name, _key, lines = multiline_accumulator + lines.append(line) + continue + + if line.strip() != MULTILINE_DELIMITER: + raise SyntaxError('invalid multiline string end') + + name, key, lines = multiline_accumulator + multiline = dedent('\n'.join(lines)) + complete_value = MULTILINE_DELIMITER + multiline + MULTILINE_DELIMITER + yield TokenType.ATTRIBUTE, (name, key, complete_value) + multiline_accumulator = None + continue + line = line.strip() if not line: continue @@ -148,7 +217,17 @@ def tokenize(content: str) -> Iterator[Token]: attrs = parts[4:] yield (TokenType.OUTPUT, (name, index, amount, token, attrs)) else: - yield (TokenType.ATTRIBUTE, (name, key, ' '.join(parts[2:]))) + value = ' '.join(parts[2:]) + + if MULTILINE_DELIMITER not in value: + yield TokenType.ATTRIBUTE, (name, key, value) + continue + + if value != MULTILINE_DELIMITER: + raise SyntaxError('invalid multiline string start') + + assert multiline_accumulator is None + multiline_accumulator = name, key, [] elif parts[1] == '<--': for _to, _from in collect_pairs(parts, '<--'): @@ -178,3 +257,6 @@ def tokenize(content: str) -> Iterator[Token]: else: raise SyntaxError(line) + + if multiline_accumulator is not None: + raise SyntaxError('unclosed multiline string') diff --git a/hathor/dag_builder/types.py b/hathor/dag_builder/types.py index 46d5af170..8e1c0a4a2 100644 --- a/hathor/dag_builder/types.py +++ b/hathor/dag_builder/types.py @@ -14,11 +14,13 @@ from __future__ import annotations +import ast from collections.abc import Callable from dataclasses import dataclass, field from enum import Enum from typing import Any, Iterator, NamedTuple, TypeAlias +from hathor.dag_builder.utils import get_literal from hathor.transaction import BaseTransaction from hathor.wallet import BaseWallet @@ 
-33,6 +35,7 @@ class DAGNodeType(Enum): Transaction = 'transaction' Token = 'token' Genesis = 'genesis' + OnChainBlueprint = 'on_chain_blueprint' @dataclass @@ -40,17 +43,52 @@ class DAGNode: name: str type: DAGNodeType - attrs: dict[str, str] = field(default_factory=dict) + attrs: dict[str, Any] = field(default_factory=dict) inputs: set[DAGInput] = field(default_factory=set) outputs: list[DAGOutput | None] = field(default_factory=list) parents: set[str] = field(default_factory=set) deps: set[str] = field(default_factory=set) + # expected balance of inputs and outputs per token + # =0 means sum(txouts) = sum(txins) + # >0 means sum(txouts) > sum(txins), e.g., withdrawal + # <0 means sum(txouts) < sum(txins), e.g., deposit + balances: dict[str, int] = field(default_factory=dict) + def get_all_dependencies(self) -> Iterator[str]: yield from self.parents yield from (name for name, _ in self.inputs) yield from self.deps + def get_attr_ast(self, attr: str) -> Any: + value = self.attrs.get(attr) + assert isinstance(value, ast.AST) + return value + + def get_attr_str(self, attr: str, *, default: str | None = None) -> str: + """Return the value of an attribute, a default, or raise a SyntaxError if it doesn't exist.""" + if value := self.attrs.get(attr): + assert isinstance(value, str) + return value + if default is not None: + return default + raise SyntaxError(f'missing required attribute: {self.name}.{attr}') + + def get_attr_list(self, attr: str, *, default: list[Any] | None = None) -> list[Any]: + """Return the value of an attribute, a default, or raise a SyntaxError if it doesn't exist.""" + if value := self.attrs.get(attr): + assert isinstance(value, list) + return value + if default is not None: + return default + raise SyntaxError(f'missing required attribute: {self.name}.{attr}') + + def get_required_literal(self, attr: str) -> str: + """Return the value of a required attribute as a literal or raise a SyntaxError if it doesn't exist.""" + value = 
self.get_attr_str(attr) + assert isinstance(value, str) + return get_literal(value) + class DAGInput(NamedTuple): node_name: str diff --git a/hathor/dag_builder/utils.py b/hathor/dag_builder/utils.py new file mode 100644 index 000000000..9432af3f0 --- /dev/null +++ b/hathor/dag_builder/utils.py @@ -0,0 +1,48 @@ +# Copyright 2024 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from hathor.dag_builder.tokenizer import MULTILINE_DELIMITER + +TEXT_DELIMITER = '"' +LITERAL_DELIMITERS = [TEXT_DELIMITER, MULTILINE_DELIMITER] + + +def is_literal(value: str) -> bool: + """Return true if the value is a literal.""" + return _get_literal_delimiter(value) is not None + + +def get_literal(value: str) -> str: + """Return the content of the literal.""" + delimiter = _get_literal_delimiter(value) + assert delimiter is not None + n = len(delimiter) + return value[n:-n] + + +def _get_literal_delimiter(value: str) -> str | None: + """Return the delimiter if value is a literal, None otherwise.""" + for delimiter in LITERAL_DELIMITERS: + if value.startswith(delimiter) and value.endswith(delimiter): + return delimiter + return None + + +def parse_amount_token(value: str) -> tuple[str, int, list[str]]: + """Parse the format "[amount] [token_symbol] [args]".""" + parts = value.split() + token = parts[1] + amount = int(parts[0]) + args = parts[2:] + return (token, amount, args) diff --git a/hathor/dag_builder/vertex_exporter.py b/hathor/dag_builder/vertex_exporter.py index 
ea761d36c..55e698038 100644 --- a/hathor/dag_builder/vertex_exporter.py +++ b/hathor/dag_builder/vertex_exporter.py @@ -12,18 +12,33 @@ # See the License for the specific language governing permissions and # limitations under the License. +import ast +import re +from collections import defaultdict +from types import ModuleType from typing import Iterator +from typing_extensions import assert_never + from hathor.conf.settings import HathorSettings -from hathor.crypto.util import decode_address +from hathor.crypto.util import decode_address, get_address_from_public_key_bytes from hathor.daa import DifficultyAdjustmentAlgorithm from hathor.dag_builder.builder import DAGBuilder, DAGNode from hathor.dag_builder.types import DAGNodeType, VertexResolverType, WalletFactoryType +from hathor.dag_builder.utils import get_literal, is_literal +from hathor.nanocontracts import Blueprint, OnChainBlueprint +from hathor.nanocontracts.catalog import NCBlueprintCatalog +from hathor.nanocontracts.exception import BlueprintDoesNotExist +from hathor.nanocontracts.on_chain_blueprint import Code +from hathor.nanocontracts.types import BlueprintId, ContractId, VertexId +from hathor.nanocontracts.utils import derive_child_contract_id, load_builtin_blueprint_for_ocb from hathor.transaction import BaseTransaction, Block, Transaction from hathor.transaction.base_transaction import TxInput, TxOutput from hathor.transaction.scripts.p2pkh import P2PKH from hathor.transaction.token_creation_tx import TokenCreationTransaction -from hathor.wallet import BaseWallet +from hathor.wallet import BaseWallet, KeyPair + +_TEMPLATE_PATTERN = re.compile(r'`(\w+)`') class VertexExporter: @@ -38,6 +53,8 @@ def __init__( genesis_wallet: BaseWallet, wallet_factory: WalletFactoryType, vertex_resolver: VertexResolverType, + nc_catalog: NCBlueprintCatalog, + blueprints_module: ModuleType | None, ) -> None: self._builder = builder self._vertices: dict[str, BaseTransaction] = {} @@ -49,14 +66,23 @@ def __init__( 
self._daa = daa self._wallet_factory = wallet_factory self._vertex_resolver = vertex_resolver + self._nc_catalog = nc_catalog + self._blueprints_module = blueprints_module self._wallets['genesis'] = genesis_wallet self._wallets['main'] = self._wallet_factory() + self._next_nc_seqnum: defaultdict[bytes, int] = defaultdict(int) + def _get_node(self, name: str) -> DAGNode: """Get node.""" return self._builder._get_node(name) + def get_wallet(self, name: str) -> BaseWallet: + if name not in self._wallets: + self._wallets[name] = self._wallet_factory() + return self._wallets[name] + def get_vertex_id(self, name: str) -> bytes: """Get the vertex id given its node name.""" return self._vertices[name].hash @@ -122,6 +148,21 @@ def _create_vertex_txout( script = self.get_next_p2pkh_script() outputs.append(TxOutput(value=amount, token_data=index, script=script)) + if token_creation: + # Create mint and melt authorities to be used by future transactions + outputs.extend([ + TxOutput( + value=TxOutput.TOKEN_MINT_MASK, + token_data=TxOutput.TOKEN_AUTHORITY_MASK | 1, + script=self.get_next_p2pkh_script(), + ), + TxOutput( + value=TxOutput.TOKEN_MELT_MASK, + token_data=TxOutput.TOKEN_AUTHORITY_MASK | 1, + script=self.get_next_p2pkh_script(), + ), + ]) + return tokens, outputs def get_next_p2pkh_script(self) -> bytes: @@ -152,7 +193,7 @@ def update_vertex_hash(self, vertex: BaseTransaction, *, fix_conflict: bool = Tr self._vertex_resolver(vertex) vertex.update_hash() - def sign_all_inputs(self, node: DAGNode, vertex: Transaction) -> None: + def sign_all_inputs(self, vertex: Transaction, *, node: DAGNode | None = None) -> None: """Sign all inputs of a vertex.""" data_to_sign = vertex.get_sighash_all() for txin in vertex.inputs: @@ -167,6 +208,8 @@ def sign_all_inputs(self, node: DAGNode, vertex: Transaction) -> None: break except KeyError: pass + else: + raise ValueError('private key not found') public_key_bytes, signature = wallet.get_input_aux_data(data_to_sign, private_key) 
txin.data = P2PKH.create_input_data(public_key_bytes, signature) @@ -185,7 +228,7 @@ def create_vertex_token(self, node: DAGNode) -> TokenCreationTransaction: vertex.token_name = node.name vertex.token_symbol = node.name vertex.timestamp = self.get_min_timestamp(node) - self.sign_all_inputs(node, vertex) + self.sign_all_inputs(vertex, node=node) if 'weight' in node.attrs: vertex.weight = float(node.attrs['weight']) else: @@ -219,16 +262,100 @@ def create_vertex_block(self, node: DAGNode) -> Block: self._block_height[blk.hash] = height return blk - def create_vertex_transaction(self, node: DAGNode) -> Transaction: + def _get_ast_value_bytes(self, ast_node: ast.AST) -> bytes: + if isinstance(ast_node, ast.Constant): + return bytes.fromhex(ast_node.value) + elif isinstance(ast_node, ast.Name): + return self.get_vertex_id(ast_node.id) + elif isinstance(ast_node, ast.Attribute): + assert isinstance(ast_node.value, ast.Name) + vertex = self._vertices[ast_node.value.id] + assert isinstance(vertex, Transaction) + if ast_node.attr == 'nc_id': + return vertex.get_nano_header().nc_id + else: + raise ValueError + else: + raise ValueError('unsupported ast node') + + def _parse_nc_id(self, ast_node: ast.AST) -> tuple[bytes, BlueprintId | None]: + if not isinstance(ast_node, ast.Call): + return self._get_ast_value_bytes(ast_node), None + + assert isinstance(ast_node.func, ast.Name) + if ast_node.func.id != 'child_contract': + raise ValueError(f'unknown function: {ast_node.func.id}') + args = [self._get_ast_value_bytes(x) for x in ast_node.args] + if len(args) != 3: + raise ValueError('wrong number of args') + parent_id_bytes, salt, blueprint_id_bytes = args + parent_id = ContractId(VertexId(parent_id_bytes)) + blueprint_id = BlueprintId(VertexId(blueprint_id_bytes)) + child_contract_id = derive_child_contract_id(parent_id, salt, blueprint_id) + return child_contract_id, blueprint_id + + def _get_next_nc_seqnum(self, nc_pubkey: bytes) -> int: + address = 
get_address_from_public_key_bytes(nc_pubkey) + cur = self._next_nc_seqnum[address] + self._next_nc_seqnum[address] = cur + 1 + return cur + + def create_vertex_on_chain_blueprint(self, node: DAGNode) -> OnChainBlueprint: + """Create an OnChainBlueprint given a node.""" + block_parents, txs_parents = self._create_vertex_parents(node) + inputs = self._create_vertex_txin(node) + tokens, outputs = self._create_vertex_txout(node) + + assert len(block_parents) == 0 + ocb = OnChainBlueprint(parents=txs_parents, inputs=inputs, outputs=outputs, tokens=tokens) + code_attr = node.get_attr_str('ocb_code') + + if is_literal(code_attr): + code_literal = get_literal(code_attr) + try: + code_bytes = bytes.fromhex(code_literal) + except ValueError: + code_str = code_literal + else: + code_str = code_bytes.decode() + else: + assert self._blueprints_module is not None + filename, _, class_name = code_attr.partition(',') + filename, class_name = filename.strip(), class_name.strip() + if not filename or not class_name: + raise SyntaxError(f'missing blueprint filename or class name: {code_attr}') + code_str = load_builtin_blueprint_for_ocb(filename, class_name, self._blueprints_module) + + ocb.code = Code.from_python_code(code_str, self._settings) + ocb.timestamp = self.get_min_timestamp(node) + self.sign_all_inputs(ocb, node=node) + + private_key_literal = node.get_required_literal('ocb_private_key') + private_key_bytes = bytes.fromhex(private_key_literal) + password_literal = node.get_required_literal('ocb_password') + password_bytes = bytes.fromhex(password_literal) + key = KeyPair(private_key_bytes) + private_key = key.get_private_key(password_bytes) + ocb.sign(private_key) + + if 'weight' in node.attrs: + ocb.weight = float(node.attrs['weight']) + else: + ocb.weight = self._daa.minimum_tx_weight(ocb) + + self.update_vertex_hash(ocb) + return ocb + + def create_vertex_transaction(self, node: DAGNode, *, cls: type[Transaction] = Transaction) -> Transaction: """Create a Transaction 
given a node.""" block_parents, txs_parents = self._create_vertex_parents(node) inputs = self._create_vertex_txin(node) tokens, outputs = self._create_vertex_txout(node) assert len(block_parents) == 0 - tx = Transaction(parents=txs_parents, inputs=inputs, outputs=outputs, tokens=tokens) + tx = cls(parents=txs_parents, inputs=inputs, outputs=outputs, tokens=tokens) tx.timestamp = self.get_min_timestamp(node) - self.sign_all_inputs(node, tx) + self.sign_all_inputs(tx, node=node) if 'weight' in node.attrs: tx.weight = float(node.attrs['weight']) else: @@ -283,8 +410,14 @@ def create_vertex(self, node: DAGNode) -> BaseTransaction: case DAGNodeType.Genesis: vertex = self.create_genesis_vertex(node) + case DAGNodeType.OnChainBlueprint: + vertex = self.create_vertex_on_chain_blueprint(node) + + case DAGNodeType.Unknown: + raise AssertionError('dag type should be known at this point') + case _: - raise NotImplementedError(node.type) + assert_never(node.type) assert vertex is not None assert vertex.hash not in self._vertice_per_id @@ -304,3 +437,13 @@ def export(self) -> Iterator[tuple[DAGNode, BaseTransaction]]: vertex = self.create_vertex(node) if node.type is not DAGNodeType.Genesis: yield node, vertex + + def _get_blueprint_class(self, blueprint_id: BlueprintId) -> type[Blueprint]: + """Get a blueprint class from the catalog or from our own on-chain blueprints.""" + try: + return self._nc_catalog.get_blueprint_class(blueprint_id) + except BlueprintDoesNotExist: + ocb = self._vertice_per_id.get(blueprint_id) + if ocb is None or not isinstance(ocb, OnChainBlueprint): + raise SyntaxError(f'{blueprint_id.hex()} is not a valid blueprint id') + return ocb.get_blueprint_class() diff --git a/hathor/indexes/manager.py b/hathor/indexes/manager.py index af648479a..e686136a1 100644 --- a/hathor/indexes/manager.py +++ b/hathor/indexes/manager.py @@ -23,9 +23,13 @@ from hathor.indexes.address_index import AddressIndex from hathor.indexes.base_index import BaseIndex +from 
hathor.indexes.blueprint_history_index import BlueprintHistoryIndex +from hathor.indexes.blueprint_timestamp_index import BlueprintTimestampIndex from hathor.indexes.height_index import HeightIndex from hathor.indexes.info_index import InfoIndex from hathor.indexes.mempool_tips_index import MempoolTipsIndex +from hathor.indexes.nc_creation_index import NCCreationIndex +from hathor.indexes.nc_history_index import NCHistoryIndex from hathor.indexes.timestamp_index import ScopeType as TimestampScopeType, TimestampIndex from hathor.indexes.tips_index import ScopeType as TipsScopeType, TipsIndex from hathor.indexes.tokens_index import TokensIndex @@ -67,6 +71,10 @@ class IndexesManager(ABC): addresses: Optional[AddressIndex] tokens: Optional[TokensIndex] utxo: Optional[UtxoIndex] + nc_creation: Optional[NCCreationIndex] + nc_history: Optional[NCHistoryIndex] + blueprints: Optional[BlueprintTimestampIndex] + blueprint_history: Optional[BlueprintHistoryIndex] def __init_checks__(self): """ Implementations must call this at the **end** of their __init__ for running ValueError checks.""" @@ -95,6 +103,10 @@ def iter_all_indexes(self) -> Iterator[BaseIndex]: self.addresses, self.tokens, self.utxo, + self.nc_creation, + self.nc_history, + self.blueprints, + self.blueprint_history, ]) @abstractmethod @@ -117,6 +129,11 @@ def enable_mempool_index(self) -> None: """Enable mempool index. It does nothing if it has already been enabled.""" raise NotImplementedError + @abstractmethod + def enable_nc_indices(self) -> None: + """Enable Nano Contract related indices.""" + raise NotImplementedError + def force_clear_all(self) -> None: """ Force clear all indexes. 
""" @@ -219,6 +236,18 @@ def add_tx(self, tx: BaseTransaction) -> bool: if self.tokens: self.tokens.add_tx(tx) + if self.nc_creation: + self.nc_creation.add_tx(tx) + + if self.nc_history: + self.nc_history.add_tx(tx) + + if self.blueprints: + self.blueprints.add_tx(tx) + + if self.blueprint_history: + self.blueprint_history.add_tx(tx) + # We need to check r1 as well to make sure we don't count twice the transactions/blocks that are # just changing from voided to executed or vice-versa if r1 and r3: @@ -243,6 +272,14 @@ def del_tx(self, tx: BaseTransaction, *, remove_all: bool = False, relax_assert: self.addresses.remove_tx(tx) if self.utxo: self.utxo.del_tx(tx) + if self.nc_creation: + self.nc_creation.del_tx(tx) + if self.nc_history: + self.nc_history.remove_tx(tx) + if self.blueprints: + self.blueprints.del_tx(tx) + if self.blueprint_history: + self.blueprint_history.remove_tx(tx) self.info.update_counts(tx, remove=True) # mempool will pick-up if the transaction is voided/invalid and remove it @@ -285,6 +322,10 @@ def __init__(self, rocksdb_storage: 'RocksDBStorage', *, settings: HathorSetting self.tokens = None self.utxo = None self.mempool_tips = None + self.nc_creation = None + self.nc_history = None + self.blueprints = None + self.blueprint_history = None # XXX: this has to be at the end of __init__, after everything has been initialized self.__init_checks__() @@ -309,3 +350,16 @@ def enable_mempool_index(self) -> None: if self.mempool_tips is None: # XXX: use of RocksDBMempoolTipsIndex is very slow and was suspended self.mempool_tips = MemoryMempoolTipsIndex(settings=self.settings) + + def enable_nc_indices(self) -> None: + from hathor.indexes.blueprint_timestamp_index import BlueprintTimestampIndex + from hathor.indexes.rocksdb_blueprint_history_index import RocksDBBlueprintHistoryIndex + from hathor.indexes.rocksdb_nc_history_index import RocksDBNCHistoryIndex + if self.nc_creation is None: + self.nc_creation = NCCreationIndex(self._db) + if 
self.nc_history is None: + self.nc_history = RocksDBNCHistoryIndex(self._db) + if self.blueprints is None: + self.blueprints = BlueprintTimestampIndex(self._db) + if self.blueprint_history is None: + self.blueprint_history = RocksDBBlueprintHistoryIndex(self._db) diff --git a/hathor/nanocontracts/on_chain_blueprint.py b/hathor/nanocontracts/on_chain_blueprint.py index 14deea332..687e6389e 100644 --- a/hathor/nanocontracts/on_chain_blueprint.py +++ b/hathor/nanocontracts/on_chain_blueprint.py @@ -147,7 +147,14 @@ class Code: settings: InitVar[HathorSettings] def __post_init__(self, settings: HathorSettings) -> None: - raise NotImplementedError('temporarily removed during nano merge') + # used to initialize self.text with + match self.kind: + case CodeKind.PYTHON_ZLIB: + text = _decompress_code(self.data, settings.NC_ON_CHAIN_BLUEPRINT_CODE_MAX_SIZE_UNCOMPRESSED) + # set self.text using object.__setattr__ to bypass frozen protection + object.__setattr__(self, 'text', text) + case _: + raise ValueError('Invalid code kind value') def __bytes__(self) -> bytes: # Code serialization format: [kind:variable bytes][null byte][data:variable bytes] diff --git a/hathor/nanocontracts/sorter/random_sorter.py b/hathor/nanocontracts/sorter/random_sorter.py index a8a10be10..ffdb70f1f 100644 --- a/hathor/nanocontracts/sorter/random_sorter.py +++ b/hathor/nanocontracts/sorter/random_sorter.py @@ -82,6 +82,26 @@ def create_from_block(cls, block: Block, nc_calls: list[Transaction]) -> Self: for txin in tx.inputs: sorter.add_edge(tx.hash, txin.tx_id) + # Add edges from nano seqnum. 
+ tx_info_list = [] + for tx in nc_calls: + assert tx.is_nano_contract() + nano_header = tx.get_nano_header() + tx_info_list.append((nano_header.nc_address, nano_header.nc_seqnum, tx.hash)) + + tx_info_list.sort() + for i in range(1, len(tx_info_list)): + prev_address, prev_seqnum, prev_hash = tx_info_list[i - 1] + curr_address, curr_seqnum, curr_hash = tx_info_list[i] + + if curr_address != prev_address: + # Address is different, so do nothing. + continue + + # XXX What to do if seqnums are the same?! + assert curr_seqnum > prev_seqnum + sorter.add_edge(curr_hash, prev_hash) + # Remove all transactions that do not belong to nc_calls. allowed_keys = set(tx.hash for tx in nc_calls) to_be_removed = [key for key in sorter.db.keys() if key not in allowed_keys] diff --git a/hathor/p2p/manager.py b/hathor/p2p/manager.py index 56371da69..422deb7eb 100644 --- a/hathor/p2p/manager.py +++ b/hathor/p2p/manager.py @@ -555,7 +555,6 @@ def connect_to_peer_from_connection_queue(self) -> None: """ It is called by the `lc_connect` looping call and tries to connect to a new peer. 
""" if not self.new_connection_from_queue: - self.log.debug('connection queue is empty') return assert self.manager is not None self.log.debug('connect to peer from connection queue') diff --git a/hathor/reward_lock/reward_lock.py b/hathor/reward_lock/reward_lock.py index 9f012ab7e..03731fbf6 100644 --- a/hathor/reward_lock/reward_lock.py +++ b/hathor/reward_lock/reward_lock.py @@ -34,14 +34,14 @@ def iter_spent_rewards(tx: 'Transaction', storage: 'VertexStorageProtocol') -> I yield spent_tx -def is_spent_reward_locked(settings: HathorSettings, tx: 'Transaction') -> bool: +def is_spent_reward_locked(settings: 'HathorSettings', tx: 'Transaction') -> bool: """ Check whether any spent reward is currently locked, considering only the block rewards spent by this tx itself, and not the inherited `min_height`""" return get_spent_reward_locked_info(settings, tx, not_none(tx.storage)) is not None def get_spent_reward_locked_info( - settings: HathorSettings, + settings: 'HathorSettings', tx: 'Transaction', storage: 'VertexStorageProtocol', ) -> Optional['RewardLockedInfo']: @@ -71,7 +71,7 @@ def get_minimum_best_height(storage: 'VertexStorageProtocol') -> int: return best_height -def _spent_reward_needed_height(settings: HathorSettings, block: Block, best_height: int) -> int: +def _spent_reward_needed_height(settings: 'HathorSettings', block: Block, best_height: int) -> int: """ Returns height still needed to unlock this `block` reward: 0 means it's unlocked.""" spent_height = block.get_height() spend_blocks = best_height - spent_height diff --git a/hathor/transaction/exceptions.py b/hathor/transaction/exceptions.py index 7560bef59..2ffebb7a6 100644 --- a/hathor/transaction/exceptions.py +++ b/hathor/transaction/exceptions.py @@ -50,8 +50,8 @@ class InvalidInputDataSize(TxValidationError): """Input data is too big""" -class NoInputError(TxValidationError): - """There is not input""" +class TooFewInputs(TxValidationError): + """There are less inputs than the minimum 
required""" class InvalidScriptError(TxValidationError): diff --git a/hathor/transaction/merge_mined_block.py b/hathor/transaction/merge_mined_block.py index 863909882..3889cdb4d 100644 --- a/hathor/transaction/merge_mined_block.py +++ b/hathor/transaction/merge_mined_block.py @@ -16,6 +16,8 @@ from typing import TYPE_CHECKING, Any, Optional +from typing_extensions import Self + from hathor.transaction.aux_pow import BitcoinAuxPow from hathor.transaction.base_transaction import TxOutput, TxVersion from hathor.transaction.block import Block @@ -67,7 +69,7 @@ def _get_formatted_fields_dict(self, short: bool = True) -> dict[str, str]: @classmethod def create_from_struct(cls, struct_bytes: bytes, storage: Optional['TransactionStorage'] = None, - *, verbose: VerboseCallback = None) -> 'MergeMinedBlock': + *, verbose: VerboseCallback = None) -> Self: blc = cls() buf = blc.get_fields_from_struct(struct_bytes, verbose=verbose) blc.aux_pow = BitcoinAuxPow.from_bytes(buf) diff --git a/hathor/transaction/scripts/execute.py b/hathor/transaction/scripts/execute.py index 23109afbc..b19ab6c0a 100644 --- a/hathor/transaction/scripts/execute.py +++ b/hathor/transaction/scripts/execute.py @@ -13,14 +13,20 @@ # limitations under the License. 
import struct +from dataclasses import dataclass from typing import NamedTuple, Optional, Union from hathor.transaction import BaseTransaction, Transaction, TxInput from hathor.transaction.exceptions import DataIndexError, FinalStackInvalid, InvalidScriptError, OutOfData -class ScriptExtras(NamedTuple): +@dataclass(slots=True, frozen=True, kw_only=True) +class ScriptExtras: tx: Transaction + + +@dataclass(slots=True, frozen=True, kw_only=True) +class UtxoScriptExtras(ScriptExtras): txin: TxInput spent_tx: BaseTransaction @@ -103,10 +109,15 @@ def script_eval(tx: Transaction, txin: TxInput, spent_tx: BaseTransaction) -> No :raises ScriptError: if script verification fails """ - input_data = txin.data - output_script = spent_tx.outputs[txin.index].script + raw_script_eval( + input_data=txin.data, + output_script=spent_tx.outputs[txin.index].script, + extras=UtxoScriptExtras(tx=tx, txin=txin, spent_tx=spent_tx), + ) + + +def raw_script_eval(*, input_data: bytes, output_script: bytes, extras: ScriptExtras) -> None: log: list[str] = [] - extras = ScriptExtras(tx=tx, txin=txin, spent_tx=spent_tx) from hathor.transaction.scripts import MultiSig if MultiSig.re_match.search(output_script): @@ -115,12 +126,12 @@ def script_eval(tx: Transaction, txin: TxInput, spent_tx: BaseTransaction) -> No # we can't use input_data + output_script because it will end with an invalid stack # i.e. 
the signatures will still be on the stack after ouput_script is executed redeem_script_pos = MultiSig.get_multisig_redeem_script_pos(input_data) - full_data = txin.data[redeem_script_pos:] + output_script + full_data = input_data[redeem_script_pos:] + output_script execute_eval(full_data, log, extras) # Second, we need to validate that the signatures on the input_data solves the redeem_script # we pop and append the redeem_script to the input_data and execute it - multisig_data = MultiSig.get_multisig_data(extras.txin.data) + multisig_data = MultiSig.get_multisig_data(input_data) execute_eval(multisig_data, log, extras) else: # merge input_data and output_script diff --git a/hathor/transaction/scripts/opcode.py b/hathor/transaction/scripts/opcode.py index 460c66821..eddaecfbb 100644 --- a/hathor/transaction/scripts/opcode.py +++ b/hathor/transaction/scripts/opcode.py @@ -37,7 +37,14 @@ TimeLocked, VerifyFailed, ) -from hathor.transaction.scripts.execute import Stack, binary_to_int, decode_opn, get_data_value, get_script_op +from hathor.transaction.scripts.execute import ( + Stack, + UtxoScriptExtras, + binary_to_int, + decode_opn, + get_data_value, + get_script_op, +) from hathor.transaction.scripts.script_context import ScriptContext @@ -178,6 +185,7 @@ def op_greaterthan_timestamp(context: ScriptContext) -> None: buf = context.stack.pop() assert isinstance(buf, bytes) (timelock,) = struct.unpack('!I', buf) + assert isinstance(context.extras, UtxoScriptExtras) if context.extras.tx.timestamp <= timelock: raise TimeLocked('The output is locked until {}'.format( datetime.datetime.fromtimestamp(timelock).strftime("%m/%d/%Y %I:%M:%S %p"))) @@ -497,6 +505,7 @@ def op_find_p2pkh(context: ScriptContext) -> None: raise MissingStackItems('OP_FIND_P2PKH: empty stack') from hathor.transaction.scripts import P2PKH + assert isinstance(context.extras, UtxoScriptExtras) spent_tx = context.extras.spent_tx txin = context.extras.txin tx = context.extras.tx diff --git 
a/hathor/transaction/storage/cache_storage.py b/hathor/transaction/storage/cache_storage.py index 70b90849f..5a82a42df 100644 --- a/hathor/transaction/storage/cache_storage.py +++ b/hathor/transaction/storage/cache_storage.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. +from __future__ import annotations + from collections import OrderedDict from typing import TYPE_CHECKING, Any, Iterator, Optional @@ -27,6 +29,7 @@ if TYPE_CHECKING: from hathor.conf.settings import HathorSettings + from hathor.nanocontracts.storage import NCStorageFactory class TransactionCacheStorage(BaseTransactionStorage): @@ -44,6 +47,7 @@ def __init__( capacity: int = 10000, *, settings: 'HathorSettings', + nc_storage_factory: NCStorageFactory, indexes: Optional[IndexesManager], _clone_if_needed: bool = False, ) -> None: @@ -81,7 +85,7 @@ def __init__( # we need to use only one weakref dict, so we must first initialize super, and then # attribute the same weakref for both. - super().__init__(indexes=indexes, settings=settings) + super().__init__(indexes=indexes, settings=settings, nc_storage_factory=nc_storage_factory) self._tx_weakref = store._tx_weakref # XXX: just to make sure this isn't being used anywhere, setters/getters should be used instead del self._allow_scope diff --git a/hathor/transaction/storage/rocksdb_storage.py b/hathor/transaction/storage/rocksdb_storage.py index 26a3b1c4b..63c3d4fda 100644 --- a/hathor/transaction/storage/rocksdb_storage.py +++ b/hathor/transaction/storage/rocksdb_storage.py @@ -12,6 +12,8 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from __future__ import annotations + from typing import TYPE_CHECKING, Iterator, Optional from structlog import get_logger @@ -29,6 +31,7 @@ import rocksdb from hathor.conf.settings import HathorSettings + from hathor.nanocontracts.storage import NCStorageFactory from hathor.transaction import BaseTransaction logger = get_logger() @@ -54,6 +57,7 @@ def __init__( *, settings: 'HathorSettings', vertex_parser: VertexParser, + nc_storage_factory: NCStorageFactory, ) -> None: self._cf_tx = rocksdb_storage.get_or_create_column_family(_CF_NAME_TX) self._cf_meta = rocksdb_storage.get_or_create_column_family(_CF_NAME_META) @@ -64,7 +68,7 @@ def __init__( self._rocksdb_storage = rocksdb_storage self._db = rocksdb_storage.get_db() self.vertex_parser = vertex_parser - super().__init__(indexes=indexes, settings=settings) + super().__init__(indexes=indexes, settings=settings, nc_storage_factory=nc_storage_factory) def _load_from_bytes(self, tx_data: bytes, meta_data: bytes) -> 'BaseTransaction': from hathor.transaction.transaction_metadata import TransactionMetadata diff --git a/hathor/transaction/storage/transaction_storage.py b/hathor/transaction/storage/transaction_storage.py index cb98e0ccc..fd8b2f39a 100644 --- a/hathor/transaction/storage/transaction_storage.py +++ b/hathor/transaction/storage/transaction_storage.py @@ -53,6 +53,11 @@ if TYPE_CHECKING: from hathor.conf.settings import HathorSettings + from hathor.nanocontracts import OnChainBlueprint + from hathor.nanocontracts.blueprint import Blueprint + from hathor.nanocontracts.catalog import NCBlueprintCatalog + from hathor.nanocontracts.storage import NCBlockStorage, NCContractStorage, NCStorageFactory + from hathor.nanocontracts.types import BlueprintId, ContractId cpu = get_cpu_profiler() @@ -77,6 +82,7 @@ class TransactionStorage(ABC): pubsub: Optional[PubSubManager] indexes: Optional[IndexesManager] _latest_n_height_tips: list[HeightInfo] + nc_catalog: Optional['NCBlueprintCatalog'] = None log = get_logger() @@ 
-101,8 +107,9 @@ class TransactionStorage(ABC): _migrations: list[BaseMigration] - def __init__(self, *, settings: HathorSettings) -> None: + def __init__(self, *, settings: HathorSettings, nc_storage_factory: NCStorageFactory) -> None: self._settings = settings + self._nc_storage_factory = nc_storage_factory # Weakref is used to guarantee that there is only one instance of each transaction in memory. self._tx_weakref: WeakValueDictionary[bytes, BaseTransaction] = WeakValueDictionary() self._tx_weakref_disabled: bool = False @@ -1131,6 +1138,92 @@ def partial_vertex_exists(self, vertex_id: VertexId) -> bool: with self.allow_partially_validated_context(): return self.transaction_exists(vertex_id) + def get_nc_block_storage(self, block: Block) -> NCBlockStorage: + """Return a block storage for the given block.""" + return self._nc_storage_factory.get_block_storage_from_block(block) + + def get_nc_storage(self, block: Block, contract_id: ContractId) -> NCContractStorage: + """Return a contract storage with the contract state at a given block.""" + from hathor.nanocontracts.types import ContractId, VertexId as NCVertexId + if not block.is_genesis: + block_storage = self._nc_storage_factory.get_block_storage_from_block(block) + else: + block_storage = self._nc_storage_factory.get_empty_block_storage() + + try: + contract_storage = block_storage.get_contract_storage(ContractId(NCVertexId(contract_id))) + except KeyError: + from hathor.nanocontracts.exception import NanoContractDoesNotExist + raise NanoContractDoesNotExist(contract_id.hex()) + return contract_storage + + def _get_blueprint(self, blueprint_id: BlueprintId) -> type[Blueprint] | OnChainBlueprint: + from hathor.nanocontracts.exception import BlueprintDoesNotExist + assert self.nc_catalog is not None + + try: + return self.nc_catalog.get_blueprint_class(blueprint_id) + except BlueprintDoesNotExist as e: + self.log.debug('blueprint-id not in the catalog', blueprint_id=blueprint_id.hex()) + if not 
self._settings.ENABLE_ON_CHAIN_BLUEPRINTS: + raise e + self.log.debug('on-chain blueprints enabled, looking for that instead') + return self.get_on_chain_blueprint(blueprint_id) + + def get_blueprint_source(self, blueprint_id: BlueprintId) -> str: + """Returns the source code associated with the given blueprint_id. + + The blueprint class could be in the catalog (first search), or it could be the tx_id of an on-chain blueprint. + + A point of difference is that an OCB will have a `__blueprint__ = BlueprintName` line, where a built-in + blueprint will not. + """ + import inspect + + from hathor.nanocontracts import OnChainBlueprint + + blueprint = self._get_blueprint(blueprint_id) + if isinstance(blueprint, OnChainBlueprint): + return self.get_on_chain_blueprint(blueprint_id).code.text + else: + module = inspect.getmodule(blueprint) + assert module is not None + return inspect.getsource(module) + + def get_blueprint_class(self, blueprint_id: BlueprintId) -> type[Blueprint]: + """Returns the blueprint class associated with the given blueprint_id. + + The blueprint class could be in the catalog (first search), or it could be the tx_id of an on-chain blueprint. 
+ """ + from hathor.nanocontracts import OnChainBlueprint + blueprint = self._get_blueprint(blueprint_id) + if isinstance(blueprint, OnChainBlueprint): + return blueprint.get_blueprint_class() + else: + return blueprint + + def get_on_chain_blueprint(self, blueprint_id: BlueprintId) -> OnChainBlueprint: + """Return an on-chain blueprint transaction.""" + assert self._settings.ENABLE_ON_CHAIN_BLUEPRINTS + from hathor.nanocontracts import OnChainBlueprint + from hathor.nanocontracts.exception import ( + BlueprintDoesNotExist, + OCBBlueprintNotConfirmed, + OCBInvalidBlueprintVertexType, + ) + try: + blueprint_tx = self.get_transaction(blueprint_id) + except TransactionDoesNotExist: + self.log.debug('no transaction with the given id found', blueprint_id=blueprint_id.hex()) + raise BlueprintDoesNotExist(blueprint_id.hex()) + if not isinstance(blueprint_tx, OnChainBlueprint): + raise OCBInvalidBlueprintVertexType(blueprint_id.hex()) + tx_meta = blueprint_tx.get_metadata() + if tx_meta.voided_by or not tx_meta.first_block: + raise OCBBlueprintNotConfirmed(blueprint_id.hex()) + # XXX: maybe use N blocks confirmation, like reward-locks + return blueprint_tx + class BaseTransactionStorage(TransactionStorage): indexes: Optional[IndexesManager] @@ -1141,8 +1234,9 @@ def __init__( pubsub: Optional[Any] = None, *, settings: HathorSettings, + nc_storage_factory: NCStorageFactory, ) -> None: - super().__init__(settings=settings) + super().__init__(settings=settings, nc_storage_factory=nc_storage_factory) # Pubsub is used to publish tx voided and winner but it's optional self.pubsub = pubsub diff --git a/hathor/transaction/transaction.py b/hathor/transaction/transaction.py index fdfbf544b..da2c441b5 100644 --- a/hathor/transaction/transaction.py +++ b/hathor/transaction/transaction.py @@ -21,6 +21,7 @@ from typing_extensions import override from hathor.checkpoint import Checkpoint +from hathor.crypto.util import get_address_b58_from_bytes from hathor.exception import 
InvalidNewTransaction from hathor.transaction import TxInput, TxOutput, TxVersion from hathor.transaction.base_transaction import TX_HASH_SIZE, GenericVertex @@ -255,11 +256,36 @@ def get_token_uid(self, index: int) -> TokenUid: return self._settings.HATHOR_TOKEN_UID return self.tokens[index - 1] + def get_related_addresses(self) -> set[str]: + ret = super().get_related_addresses() + if self.is_nano_contract(): + nano_header = self.get_nano_header() + ret.add(get_address_b58_from_bytes(nano_header.nc_address)) + return ret + def to_json(self, decode_script: bool = False, include_metadata: bool = False) -> dict[str, Any]: json = super().to_json(decode_script=decode_script, include_metadata=include_metadata) json['tokens'] = [h.hex() for h in self.tokens] + + if self.is_nano_contract(): + nano_header = self.get_nano_header() + json['nc_id'] = nano_header.get_contract_id().hex() + json['nc_seqnum'] = nano_header.nc_seqnum + json['nc_blueprint_id'] = nano_header.get_blueprint_id().hex() + json['nc_method'] = nano_header.nc_method + json['nc_args'] = nano_header.nc_args_bytes.hex() + json['nc_address'] = get_address_b58_from_bytes(nano_header.nc_address) + json['nc_context'] = nano_header.get_context().to_json() + return json + def to_json_extended(self) -> dict[str, Any]: + json_extended = super().to_json_extended() + if self.is_nano_contract(): + json = self.to_json() + return {**json, **json_extended} + return json_extended + def verify_checkpoint(self, checkpoints: list[Checkpoint]) -> None: assert self.storage is not None if self.is_genesis: @@ -281,6 +307,13 @@ def get_complete_token_info(self) -> dict[TokenUid, TokenInfo]: return token_dict + def get_minimum_number_of_inputs(self) -> int: + """Return the minimum number of inputs for this transaction. 
+ This is used by the verification services.""" + if self.is_nano_contract(): + return 0 + return 1 + def _get_token_info_from_inputs(self) -> dict[TokenUid, TokenInfo]: """Sum up all tokens present in the inputs and their properties (amount, can_mint, can_melt) """ diff --git a/hathor/transaction/util.py b/hathor/transaction/util.py index d1bec3832..5239f8ac3 100644 --- a/hathor/transaction/util.py +++ b/hathor/transaction/util.py @@ -17,8 +17,11 @@ import re import struct from math import ceil, floor +from struct import error as StructError from typing import TYPE_CHECKING, Any, Callable, Optional +from hathor.transaction.exceptions import InvalidOutputValue, TransactionDataError + if TYPE_CHECKING: from hathor.conf.settings import HathorSettings @@ -42,13 +45,14 @@ def bytes_to_int(data: bytes, *, signed: bool = False) -> int: return int.from_bytes(data, byteorder='big', signed=signed) -def unpack(fmt: str, buf: bytes) -> Any: +def unpack(fmt: str, buf: bytes | memoryview) -> tuple[Any, bytes | memoryview]: size = struct.calcsize(fmt) return struct.unpack(fmt, buf[:size]), buf[size:] -def unpack_len(n: int, buf: bytes) -> tuple[bytes, bytes]: - return buf[:n], buf[n:] +def unpack_len(n: int, buf: bytes | memoryview) -> tuple[bytes, bytes | memoryview]: + ret = buf[:n] if isinstance(buf, bytes) else bytes(buf[:n]) + return ret, buf[n:] def get_deposit_amount(settings: HathorSettings, mint_amount: int) -> int: @@ -64,3 +68,52 @@ def clean_token_string(string: str) -> str: It sets to uppercase, removes double spaces and spaces at the beginning and end. 
""" return re.sub(r'\s\s+', ' ', string).strip().upper() + + +def decode_string_utf8(encoded: bytes, key: str) -> str: + """ Raises StructError in case it's not a valid utf-8 string + """ + try: + decoded = encoded.decode('utf-8') + return decoded + except UnicodeDecodeError: + raise StructError('{} must be a valid utf-8 string.'.format(key)) + + +def bytes_to_output_value(data: bytes) -> tuple[int, bytes]: + from hathor.serialization import BadDataError, Deserializer + from hathor.serialization.encoding.output_value import decode_output_value + deserializer = Deserializer.build_bytes_deserializer(data) + try: + output_value = decode_output_value(deserializer) + except BadDataError as e: + raise InvalidOutputValue(*e.args) + remaining_data = deserializer.read_all() + return (output_value, remaining_data) + + +def output_value_to_bytes(number: int) -> bytes: + from hathor.serialization import Serializer + from hathor.serialization.encoding.output_value import encode_output_value + serializer = Serializer.build_bytes_serializer() + try: + encode_output_value(serializer, number) + except ValueError as e: + raise InvalidOutputValue(*e.args) + return bytes(serializer.finalize()) + + +def validate_token_name_and_symbol(settings: HathorSettings, token_name: str, token_symbol: str) -> None: + """Validate token_name and token_symbol before creating a new token.""" + name_len = len(token_name) + symbol_len = len(token_symbol) + if name_len == 0 or name_len > settings.MAX_LENGTH_TOKEN_NAME: + raise TransactionDataError('Invalid token name length ({})'.format(name_len)) + if symbol_len == 0 or symbol_len > settings.MAX_LENGTH_TOKEN_SYMBOL: + raise TransactionDataError('Invalid token symbol length ({})'.format(symbol_len)) + + # Can't create token with hathor name or symbol + if clean_token_string(token_name) == clean_token_string(settings.HATHOR_TOKEN_NAME): + raise TransactionDataError('Invalid token name ({})'.format(token_name)) + if clean_token_string(token_symbol) == 
clean_token_string(settings.HATHOR_TOKEN_SYMBOL): + raise TransactionDataError('Invalid token symbol ({})'.format(token_symbol)) diff --git a/hathor/types.py b/hathor/types.py index 7dfa808aa..d264b93af 100644 --- a/hathor/types.py +++ b/hathor/types.py @@ -14,16 +14,17 @@ from typing import TypeAlias -# XXX There is a lot of refactor to be done before we can use `NewType`. -# So, let's skip using NewType until everything is refactored. +# XXX: All of these types already have an equivalent NewType available on `hathor.nanoconracts.types`, the next step is +# to refactor the places which use `hathor.types`, which is still a lot. Some of these would also benefit from +# using custom classes like `Hash` for better str/repr. -VertexId: TypeAlias = bytes # NewType('TxId', bytes) Address: TypeAlias = bytes # NewType('Address', bytes) AddressB58: TypeAlias = str -TxOutputScript: TypeAlias = bytes # NewType('TxOutputScript', bytes) +Amount: TypeAlias = int # NewType('Amount', int) Timestamp: TypeAlias = int # NewType('Timestamp', int) +TxOutputScript: TypeAlias = bytes # NewType('TxOutputScript', bytes) +VertexId: TypeAlias = bytes # NewType('VertexId', bytes) TokenUid: TypeAlias = VertexId # NewType('TokenUid', VertexId) -Amount: TypeAlias = int # NewType('Amount', int) class Hash: diff --git a/hathor/util.py b/hathor/util.py index 755a1d381..0a87d371c 100644 --- a/hathor/util.py +++ b/hathor/util.py @@ -12,13 +12,14 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from __future__ import annotations + import datetime import gc import json import math import sys import time -import warnings from collections import OrderedDict from contextlib import AbstractContextManager from dataclasses import asdict, dataclass @@ -36,6 +37,7 @@ import structlog from hathor.transaction.base_transaction import BaseTransaction + from hathor.wallet import HDWallet logger = get_logger() @@ -67,23 +69,6 @@ def practically_equal(a: dict[Any, Any], b: dict[Any, Any]) -> bool: return True -def deprecated(msg: str) -> Callable[..., Any]: - """Use to indicate that a function or method has been deprecated.""" - warnings.simplefilter('default', DeprecationWarning) - - def decorator(func: Callable[..., Any]) -> Callable[..., Any]: - @wraps(func) - def wrapper(*args: Any, **kwargs: Any) -> Any: - # warnings.warn('{} is deprecated. {}'.format(func.__name__, msg), - # category=DeprecationWarning, stacklevel=2) - return func(*args, **kwargs) - - wrapper.__deprecated = func # type: ignore - return wrapper - - return decorator - - def skip_warning(func: Callable[..., Any]) -> Callable[..., Any]: f = cast(Callable[..., Any], getattr(func, '__deprecated', func)) if hasattr(func, '__self__') and not hasattr(f, '__self__'): @@ -377,6 +362,37 @@ def skip_n(it: Iterator[_T], n: int) -> Iterator[_T]: return it +def skip_until(it: Iterator[_T], condition: Callable[[_T], bool]) -> Iterator[_T]: + """ Skip all elements and stops after condition is True, it will also skip the element where condition is True. 
+ + Example: + + >>> list(skip_until(iter(range(10)), lambda x: x == 0)) + [1, 2, 3, 4, 5, 6, 7, 8, 9] + + >>> list(skip_until(iter(range(10)), lambda x: x > 0)) + [2, 3, 4, 5, 6, 7, 8, 9] + + >>> list(skip_until(iter(range(10)), lambda x: x == 8)) + [9] + + >>> list(skip_until(iter(range(10)), lambda x: x == 9)) + [] + + >>> list(skip_until(iter(range(10)), lambda x: x == 10)) + [] + """ + while True: + try: + i = next(it) + except StopIteration: + return it + else: + if condition(i): + break + return it + + _DT_ITER_NEXT_WARN = 3 # time in seconds to warn when `next(iter_tx)` takes too long _DT_LOG_PROGRESS = 30 # time in seconds after which a progress will be logged (it can take longer, but not shorter) _DT_YIELD_WARN = 1 # time in seconds to warn when `yield tx` takes too long (which is when processing happens) @@ -823,3 +839,19 @@ def bytes_to_vertexid(data: bytes) -> VertexId: if len(data) != 32: raise ValueError('length must be exactly 32 bytes') return VertexId(data) + + +def bytes_from_hex(hex_str: str) -> bytes | None: + """Convert a hex string to bytes or return None if it's invalid.""" + try: + return bytes.fromhex(hex_str) + except ValueError: + return None + + +def initialize_hd_wallet(words: str) -> HDWallet: + """Get an initialized HDWallet from the provided words.""" + from hathor.wallet import HDWallet + hd = HDWallet(words=words) + hd._manually_initialize() + return hd diff --git a/hathor/verification/transaction_verifier.py b/hathor/verification/transaction_verifier.py index 153fedd10..5381d7b3a 100644 --- a/hathor/verification/transaction_verifier.py +++ b/hathor/verification/transaction_verifier.py @@ -30,10 +30,10 @@ InvalidInputData, InvalidInputDataSize, InvalidToken, - NoInputError, RewardLocked, ScriptError, TimestampError, + TooFewInputs, TooManyInputs, TooManySigOps, WeightError, @@ -199,9 +199,10 @@ def verify_number_of_inputs(self, tx: Transaction) -> None: if len(tx.inputs) > self._settings.MAX_NUM_INPUTS: raise 
TooManyInputs('Maximum number of inputs exceeded') - if len(tx.inputs) == 0: + minimum = tx.get_minimum_number_of_inputs() + if len(tx.inputs) < minimum: if not tx.is_genesis: - raise NoInputError('Transaction must have at least one input') + raise TooFewInputs(f'Transaction must have at least {minimum} input(s)') def verify_output_token_indexes(self, tx: Transaction) -> None: """Verify outputs reference an existing token uid in the tokens list diff --git a/hathor/version_resource.py b/hathor/version_resource.py index 57d2801f2..a0942e2cb 100644 --- a/hathor/version_resource.py +++ b/hathor/version_resource.py @@ -43,6 +43,7 @@ def render_GET(self, request): data = { 'version': hathor.__version__, 'network': self.manager.network, + 'nano_contracts_enabled': self._settings.ENABLE_NANO_CONTRACTS, 'min_weight': self._settings.MIN_TX_WEIGHT, # DEPRECATED 'min_tx_weight': self._settings.MIN_TX_WEIGHT, 'min_tx_weight_coefficient': self._settings.MIN_TX_WEIGHT_COEFFICIENT, diff --git a/hathor/vertex_handler/vertex_handler.py b/hathor/vertex_handler/vertex_handler.py index f7ac4470d..afc2fa0e0 100644 --- a/hathor/vertex_handler/vertex_handler.py +++ b/hathor/vertex_handler/vertex_handler.py @@ -263,4 +263,7 @@ def _log_new_object(self, tx: BaseTransaction, message_fmt: str, *, quiet: bool) log_func = self._log.info else: log_func = self._log.debug + + if tx.name: + kwargs['__name'] = tx.name log_func(message, **kwargs) diff --git a/hathor/wallet/resources/thin_wallet/address_history.py b/hathor/wallet/resources/thin_wallet/address_history.py index db0e2c221..4fc8fabf8 100644 --- a/hathor/wallet/resources/thin_wallet/address_history.py +++ b/hathor/wallet/resources/thin_wallet/address_history.py @@ -71,7 +71,7 @@ def render_POST(self, request: Request) -> bytes: addresses = post_data['addresses'] assert isinstance(addresses, list) - return self.get_address_history(addresses, post_data.get('hash')) + return self.get_address_history(addresses, post_data.get('hash'), 
post_data.get('tx_version')) def render_GET(self, request: Request) -> bytes: """ GET request for /thin_wallet/address_history/ @@ -135,7 +135,17 @@ def render_GET(self, request: Request) -> bytes: # If hash parameter is in the request, it must be a valid hex ref_hash = raw_args[b'hash'][0].decode('utf-8') - return self.get_address_history([address.decode('utf-8') for address in addresses], ref_hash) + allowed_tx_versions_arg = raw_args.get(b'tx_version[]', None) + allowed_tx_versions = ( + set([int(tx_version.decode('utf-8')) for tx_version in allowed_tx_versions_arg]) + if allowed_tx_versions_arg is not None + else None + ) + return self.get_address_history( + [address.decode('utf-8') for address in addresses], + ref_hash, + allowed_tx_versions + ) def _validate_index(self, request: Request) -> bytes | None: """Return None if validation is successful (addresses index is enabled), and an error message otherwise.""" @@ -149,7 +159,10 @@ def _validate_index(self, request: Request) -> bytes | None: request.setResponseCode(503) return json_dumpb({'success': False, 'message': 'wallet index is disabled'}) - def get_address_history(self, addresses: list[str], ref_hash: Optional[str]) -> bytes: + def get_address_history(self, + addresses: list[str], + ref_hash: Optional[str], + allowed_tx_versions: Optional[set[int]]) -> bytes: ref_hash_bytes = None if ref_hash: try: @@ -209,6 +222,10 @@ def get_address_history(self, addresses: list[str], ref_hash: Optional[str]) -> if tx_hash not in seen: tx = self.manager.tx_storage.get_transaction(tx_hash) + if allowed_tx_versions and tx.version not in allowed_tx_versions: + # Transaction version is not in the version filter + continue + tx_elements = len(tx.inputs) + len(tx.outputs) if total_elements + tx_elements > self.max_inputs_outputs_address_history: # If the adition of this tx overcomes the maximum number of inputs and outputs, then break @@ -277,6 +294,22 @@ def get_address_history(self, addresses: list[str], ref_hash: 
Optional[str]) -> 'type': 'string' } }, + { + 'name': 'hash', + 'in': 'query', + 'description': 'Hash used to paginate the request.', + 'schema': { + 'type': 'string' + } + }, + { + 'name': 'tx_version[]', + 'in': 'query', + 'description': 'List of versions to filter the transactions.', + 'schema': { + 'type': 'int' + } + }, ], 'responses': { '200': { diff --git a/tests/consensus/test_first_block.py b/tests/consensus/test_first_block.py index 78d58763a..4b291989d 100644 --- a/tests/consensus/test_first_block.py +++ b/tests/consensus/test_first_block.py @@ -1,5 +1,6 @@ from hathor.transaction import Block, Transaction from tests import unittest +from tests.dag_builder.builder import TestDAGBuilder class FirstBlockTestCase(unittest.TestCase): @@ -16,7 +17,7 @@ def setUp(self) -> None: .set_cpu_mining_service(cpu_mining_service) self.manager = self.create_peer_from_builder(builder) - self.dag_builder = self.get_dag_builder(self.manager) + self.dag_builder = TestDAGBuilder.from_manager(self.manager) def test_first_block(self) -> None: artifacts = self.dag_builder.build_from_str(""" diff --git a/tests/dag_builder/builder.py b/tests/dag_builder/builder.py new file mode 100644 index 000000000..114776322 --- /dev/null +++ b/tests/dag_builder/builder.py @@ -0,0 +1,48 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from types import ModuleType + +from mnemonic import Mnemonic + +from hathor.dag_builder import DAGBuilder +from hathor.dag_builder.types import WalletFactoryType +from hathor.manager import HathorManager +from hathor.util import Random +from hathor.wallet import HDWallet +from tests.utils import GENESIS_SEED + + +class TestDAGBuilder: + @staticmethod + def create_random_hd_wallet(rng: Random) -> HDWallet: + m = Mnemonic('english') + words = m.to_mnemonic(rng.randbytes(32)) + hd = HDWallet(words=words) + hd._manually_initialize() + return hd + + @staticmethod + def from_manager( + manager: HathorManager, + genesis_words: str | None = None, + wallet_factory: WalletFactoryType | None = None, + blueprints_module: ModuleType | None = None + ) -> DAGBuilder: + """Create a DAGBuilder instance from a HathorManager instance.""" + return DAGBuilder.from_manager( + manager=manager, + genesis_words=genesis_words or GENESIS_SEED, + wallet_factory=wallet_factory or (lambda: TestDAGBuilder.create_random_hd_wallet(manager.rng)), + ) diff --git a/tests/dag_builder/test_dag_builder.py b/tests/dag_builder/test_dag_builder.py index 996ead1cf..b67afa894 100644 --- a/tests/dag_builder/test_dag_builder.py +++ b/tests/dag_builder/test_dag_builder.py @@ -1,6 +1,7 @@ from hathor.transaction import Block, Transaction from hathor.transaction.token_creation_tx import TokenCreationTransaction from tests import unittest +from tests.dag_builder.builder import TestDAGBuilder class DAGBuilderTestCase(unittest.TestCase): @@ -17,7 +18,8 @@ def setUp(self): .set_cpu_mining_service(cpu_mining_service) self.manager = self.create_peer_from_builder(builder) - self.dag_builder = self.get_dag_builder(self.manager) + self.nc_catalog = self.manager.tx_storage.nc_catalog + self.dag_builder = TestDAGBuilder.from_manager(self.manager) def test_one_tx(self) -> None: artifacts = self.dag_builder.build_from_str(""" diff --git a/tests/others/test_init_manager.py b/tests/others/test_init_manager.py index 
df6145104..71b844abf 100644 --- a/tests/others/test_init_manager.py +++ b/tests/others/test_init_manager.py @@ -14,10 +14,14 @@ class ModifiedTransactionRocksDBStorage(TransactionRocksDBStorage): def __init__(self, path: str, settings: HathorSettings): + from hathor.nanocontracts.storage import NCRocksDBStorageFactory + rocksdb_storage = RocksDBStorage(path=path) + nc_storage_factory = NCRocksDBStorageFactory(rocksdb_storage) super().__init__( - rocksdb_storage=RocksDBStorage(path=path), + rocksdb_storage=rocksdb_storage, settings=settings, vertex_parser=VertexParser(settings=settings), + nc_storage_factory=nc_storage_factory, ) self._first_tx: BaseTransaction | None = None diff --git a/tests/others/test_metrics.py b/tests/others/test_metrics.py index c3d50e969..e4c6decd2 100644 --- a/tests/others/test_metrics.py +++ b/tests/others/test_metrics.py @@ -116,6 +116,7 @@ def _init_manager(path: tempfile.TemporaryDirectory | None = None) -> HathorMana b'timestamp-sorted-all': 0.0, b'timestamp-sorted-blocks': 0.0, b'timestamp-sorted-txs': 0.0, + b'nc-state': 0.0, }) manager.tx_storage.pre_init() @@ -175,6 +176,7 @@ def _init_manager(path: tempfile.TemporaryDirectory | None = None) -> HathorMana b'timestamp-sorted-all': 0.0, b'timestamp-sorted-blocks': 0.0, b'timestamp-sorted-txs': 0.0, + b'nc-state': 0.0, }) manager.tx_storage.pre_init() @@ -253,14 +255,24 @@ def test_cache_data_collection(self): """Test if cache-related data is correctly being collected from the TransactionCacheStorage """ + from hathor.nanocontracts.storage import NCRocksDBStorageFactory + # Preparation rocksdb_storage = self.create_rocksdb_storage() + nc_storage_factory = NCRocksDBStorageFactory(rocksdb_storage) base_storage = TransactionRocksDBStorage( rocksdb_storage=rocksdb_storage, settings=self._settings, vertex_parser=VertexParser(settings=self._settings), + nc_storage_factory=nc_storage_factory, + ) + tx_storage = TransactionCacheStorage( + base_storage, + self.clock, + indexes=None, + 
settings=self._settings, + nc_storage_factory=nc_storage_factory, ) - tx_storage = TransactionCacheStorage(base_storage, self.clock, indexes=None, settings=self._settings) manager = self.create_peer('testnet', tx_storage=tx_storage) diff --git a/tests/tx/test_indexes.py b/tests/tx/test_indexes.py index 215016b37..2bc5a382d 100644 --- a/tests/tx/test_indexes.py +++ b/tests/tx/test_indexes.py @@ -4,10 +4,10 @@ from hathor.storage.rocksdb_storage import RocksDBStorage from hathor.transaction import Transaction from hathor.transaction.vertex_parser import VertexParser -from hathor.util import iwindows +from hathor.util import initialize_hd_wallet, iwindows from hathor.wallet import Wallet from tests import unittest -from tests.utils import add_blocks_unlock_reward, add_custom_tx, add_new_tx, get_genesis_key +from tests.utils import DEFAULT_WORDS, add_blocks_unlock_reward, add_custom_tx, add_new_tx, get_genesis_key class BaseIndexesTest(unittest.TestCase): @@ -470,7 +470,7 @@ def test_utxo_index_after_push_tx(self): # spend that utxo and check that it is gone from the index address1 = self.get_address(1) - wallet = self.get_wallet() + wallet = initialize_hd_wallet(DEFAULT_WORDS) tx1 = Transaction( timestamp=int(self.clock.seconds()), weight=1.0, @@ -544,7 +544,7 @@ def test_utxo_index_last(self): change_value = 26 transfer_value = 6400 - change_value - wallet = self.get_wallet() + wallet = initialize_hd_wallet(DEFAULT_WORDS) tx1 = Transaction( timestamp=int(self.clock.seconds()), weight=1.0, @@ -693,6 +693,7 @@ class RocksDBIndexesTest(BaseIndexesTest): def setUp(self): import tempfile + from hathor.nanocontracts.storage import NCRocksDBStorageFactory from hathor.transaction.storage import TransactionRocksDBStorage super().setUp() @@ -701,7 +702,13 @@ def setUp(self): self.tmpdirs.append(directory) rocksdb_storage = RocksDBStorage(path=directory) parser = VertexParser(settings=self._settings) - self.tx_storage = TransactionRocksDBStorage(rocksdb_storage, 
settings=self._settings, vertex_parser=parser) + nc_storage_factory = NCRocksDBStorageFactory(rocksdb_storage) + self.tx_storage = TransactionRocksDBStorage( + rocksdb_storage, + settings=self._settings, + vertex_parser=parser, + nc_storage_factory=nc_storage_factory, + ) self.genesis = self.tx_storage.get_all_genesis() self.genesis_blocks = [tx for tx in self.genesis if tx.is_block] self.genesis_txs = [tx for tx in self.genesis if not tx.is_block] diff --git a/tests/tx/test_scripts.py b/tests/tx/test_scripts.py index 73430f729..f3fef412a 100644 --- a/tests/tx/test_scripts.py +++ b/tests/tx/test_scripts.py @@ -18,17 +18,10 @@ TimeLocked, VerifyFailed, ) -from hathor.transaction.scripts import ( - P2PKH, - HathorScript, - MultiSig, - Opcode, - ScriptExtras, - create_base_script, - create_output_script, -) +from hathor.transaction.scripts import P2PKH, HathorScript, MultiSig, Opcode, create_base_script, create_output_script from hathor.transaction.scripts.construct import count_sigops, get_pushdata, get_sigops_count, re_compile from hathor.transaction.scripts.execute import ( + UtxoScriptExtras, binary_to_int, decode_opn, evaluate_final_stack, @@ -252,7 +245,7 @@ def test_checksig(self): signature = self.genesis_private_key.sign(hashed_data, ec.ECDSA(hashes.SHA256())) pubkey_bytes = get_public_key_bytes_compressed(self.genesis_public_key) - extras = ScriptExtras(tx=tx, txin=Mock(), spent_tx=Mock()) + extras = UtxoScriptExtras(tx=tx, txin=Mock(), spent_tx=Mock()) # wrong signature puts False (0) on stack stack = [b'aaaaaaaaa', pubkey_bytes] @@ -277,7 +270,7 @@ def test_checksig_cache(self): signature = self.genesis_private_key.sign(hashed_data, ec.ECDSA(hashes.SHA256())) pubkey_bytes = get_public_key_bytes_compressed(self.genesis_public_key) - extras = ScriptExtras(tx=tx, txin=Mock(), spent_tx=Mock()) + extras = UtxoScriptExtras(tx=tx, txin=Mock(), spent_tx=Mock()) stack = [signature, pubkey_bytes] self.assertIsNone(tx._sighash_data_cache) @@ -507,28 +500,28 @@ def 
test_find_p2pkh(self): # try with just 1 output stack = [genesis_address] tx = Transaction(outputs=[TxOutput(1, out_genesis)]) - extras = ScriptExtras(tx=tx, txin=txin, spent_tx=spent_tx) + extras = UtxoScriptExtras(tx=tx, txin=txin, spent_tx=spent_tx) op_find_p2pkh(ScriptContext(stack=stack, logs=[], extras=extras)) self.assertEqual(stack.pop(), 1) # several outputs and correct output among them stack = [genesis_address] tx = Transaction(outputs=[TxOutput(1, out1), TxOutput(1, out2), TxOutput(1, out_genesis), TxOutput(1, out3)]) - extras = ScriptExtras(tx=tx, txin=txin, spent_tx=spent_tx) + extras = UtxoScriptExtras(tx=tx, txin=txin, spent_tx=spent_tx) op_find_p2pkh(ScriptContext(stack=stack, logs=[], extras=extras)) self.assertEqual(stack.pop(), 1) # several outputs without correct amount output stack = [genesis_address] tx = Transaction(outputs=[TxOutput(1, out1), TxOutput(1, out2), TxOutput(2, out_genesis), TxOutput(1, out3)]) - extras = ScriptExtras(tx=tx, txin=txin, spent_tx=spent_tx) + extras = UtxoScriptExtras(tx=tx, txin=txin, spent_tx=spent_tx) with self.assertRaises(VerifyFailed): op_find_p2pkh(ScriptContext(stack=stack, logs=[], extras=extras)) # several outputs without correct address output stack = [genesis_address] tx = Transaction(outputs=[TxOutput(1, out1), TxOutput(1, out2), TxOutput(1, out3)]) - extras = ScriptExtras(tx=tx, txin=txin, spent_tx=spent_tx) + extras = UtxoScriptExtras(tx=tx, txin=txin, spent_tx=spent_tx) with self.assertRaises(VerifyFailed): op_find_p2pkh(ScriptContext(stack=stack, logs=[], extras=extras)) @@ -542,7 +535,7 @@ def test_greaterthan_timestamp(self): tx = Transaction() stack = [struct.pack('!I', timestamp)] - extras = ScriptExtras(tx=tx, txin=Mock(), spent_tx=Mock()) + extras = UtxoScriptExtras(tx=tx, txin=Mock(), spent_tx=Mock()) with self.assertRaises(TimeLocked): tx.timestamp = timestamp - 1 @@ -568,7 +561,7 @@ def test_checkmultisig(self): tx = Transaction(inputs=[txin], outputs=[txout]) data_to_sign = 
tx.get_sighash_all() - extras = ScriptExtras(tx=tx, txin=Mock(), spent_tx=Mock()) + extras = UtxoScriptExtras(tx=tx, txin=Mock(), spent_tx=Mock()) wallet = HDWallet() wallet._manually_initialize() diff --git a/tests/tx/test_tx.py b/tests/tx/test_tx.py index 5b00f8694..c62ab403f 100644 --- a/tests/tx/test_tx.py +++ b/tests/tx/test_tx.py @@ -23,10 +23,10 @@ InvalidInputDataSize, InvalidOutputScriptSize, InvalidOutputValue, - NoInputError, ParentDoesNotExist, PowError, TimestampError, + TooFewInputs, TooManyInputs, TooManyOutputs, TooManySigOps, @@ -135,7 +135,7 @@ def test_too_many_inputs(self): def test_no_inputs(self): tx = Transaction(inputs=[], storage=self.tx_storage) - with self.assertRaises(NoInputError): + with self.assertRaises(TooFewInputs): self._verifiers.tx.verify_number_of_inputs(tx) def test_too_many_outputs(self): diff --git a/tests/unittest.py b/tests/unittest.py index 65b162f3c..93f3bbfd1 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -1,8 +1,10 @@ import os +import re import secrets import shutil import tempfile import time +from contextlib import contextmanager from typing import Any, Callable, Collection, Iterable, Iterator, Optional from unittest import main as ut_main @@ -14,10 +16,10 @@ from hathor.conf.get_settings import get_global_settings from hathor.conf.settings import HathorSettings from hathor.daa import DifficultyAdjustmentAlgorithm, TestMode -from hathor.dag_builder import DAGBuilder from hathor.event import EventManager from hathor.event.storage import EventStorage from hathor.manager import HathorManager +from hathor.nanocontracts.nc_exec_logs import NCLogConfig from hathor.p2p.peer import PrivatePeer from hathor.p2p.sync_v2.agent import NodeBlockSync from hathor.pubsub import PubSubManager @@ -30,7 +32,6 @@ from hathor.util import Random, not_none from hathor.wallet import BaseWallet, HDWallet, Wallet from tests.test_memory_reactor_clock import TestMemoryReactorClock -from tests.utils import GENESIS_SEED logger = 
get_logger() main = ut_main @@ -150,18 +151,6 @@ def _create_test_wallet(self, unlocked: bool = False) -> Wallet: wallet.lock() return wallet - def get_dag_builder(self, manager: HathorManager) -> DAGBuilder: - genesis_wallet = HDWallet(words=GENESIS_SEED) - genesis_wallet._manually_initialize() - - return DAGBuilder( - settings=manager._settings, - daa=manager.daa, - genesis_wallet=genesis_wallet, - wallet_factory=self.get_wallet, - vertex_resolver=lambda x: manager.cpu_mining_service.resolve(x), - ) - def get_builder(self, settings: HathorSettings | None = None) -> TestBuilder: builder = TestBuilder(settings) builder.set_rng(self.rng) \ @@ -202,6 +191,8 @@ def create_peer( # type: ignore[no-untyped-def] enable_event_queue: bool | None = None, enable_ipv6: bool = False, disable_ipv4: bool = False, + nc_indices: bool = False, + nc_log_config: NCLogConfig | None = None, ): # TODO: Add -> HathorManager here. It breaks the lint in a lot of places. settings = self._settings._replace(NETWORK_NAME=network) @@ -254,6 +245,13 @@ def create_peer( # type: ignore[no-untyped-def] daa = DifficultyAdjustmentAlgorithm(settings=self._settings, test_mode=TestMode.TEST_ALL_WEIGHT) builder.set_daa(daa) + + if nc_indices: + builder.enable_nc_indices() + + if nc_log_config: + builder.set_nc_log_config(nc_log_config) + manager = self.create_peer_from_builder(builder, start_manager=start_manager) return manager @@ -436,6 +434,24 @@ def assertSyncedProgress(self, node_sync: NodeBlockSync) -> None: def assertV2SyncedProgress(self, node_sync: NodeBlockSync) -> None: self.assertEqual(node_sync.synced_block, node_sync.peer_best_block) + @contextmanager + def assertNCFail(self, class_name: str, pattern: str | re.Pattern[str] | None = None) -> Iterator[BaseException]: + """Assert that a NCFail is raised and it has the expected class name and str(exc) format. 
+ """ + from hathor.nanocontracts.exception import NCFail + + with self.assertRaises(NCFail) as cm: + yield cm + + self.assertEqual(cm.exception.__class__.__name__, class_name) + + if pattern is not None: + actual = str(cm.exception) + if isinstance(pattern, re.Pattern): + assert pattern.match(actual) + else: + self.assertEqual(pattern, actual) + def clean_tmpdirs(self) -> None: for tmpdir in self.tmpdirs: shutil.rmtree(tmpdir) diff --git a/tests/utils.py b/tests/utils.py index 08a2101df..48a420856 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -32,6 +32,11 @@ # useful for adding blocks to a different wallet BURN_ADDRESS = bytes.fromhex('28acbfb94571417423c1ed66f706730c4aea516ac5762cccb8') +DEFAULT_WORDS: str = ( + 'bind daring above film health blush during tiny neck slight clown salmon ' + 'wine brown good setup later omit jaguar tourist rescue flip pet salute' +) + def resolve_block_bytes(*, block_bytes: bytes, cpu_mining_service: CpuMiningService) -> bytes: """ From block bytes we create a block and resolve pow