diff --git a/hathor/builder/builder.py b/hathor/builder/builder.py index 3a4631476..02055de7e 100644 --- a/hathor/builder/builder.py +++ b/hathor/builder/builder.py @@ -21,6 +21,7 @@ from hathor.conf.get_settings import get_settings from hathor.conf.settings import HathorSettings as HathorSettingsType from hathor.consensus import ConsensusAlgorithm +from hathor.daa import DifficultyAdjustmentAlgorithm from hathor.event import EventManager from hathor.event.storage import EventMemoryStorage, EventRocksDBStorage, EventStorage from hathor.event.websocket import EventWebsocketFactory @@ -41,7 +42,7 @@ TransactionStorage, ) from hathor.util import Random, Reactor, get_environment_info -from hathor.verification.verification_service import VerificationService +from hathor.verification.verification_service import VerificationService, VertexVerifiers from hathor.wallet import BaseWallet, Wallet logger = get_logger() @@ -102,6 +103,9 @@ def __init__(self) -> None: self._feature_service: Optional[FeatureService] = None self._bit_signaling_service: Optional[BitSignalingService] = None + self._daa: Optional[DifficultyAdjustmentAlgorithm] = None + + self._vertex_verifiers: Optional[VertexVerifiers] = None self._verification_service: Optional[VerificationService] = None self._rocksdb_path: Optional[str] = None @@ -161,6 +165,7 @@ def build(self) -> BuildArtifacts: feature_service = self._get_or_create_feature_service(tx_storage) bit_signaling_service = self._get_or_create_bit_signaling_service(tx_storage) verification_service = self._get_or_create_verification_service() + daa = self._get_or_create_daa() if self._enable_address_index: indexes.enable_address_index(pubsub) @@ -181,9 +186,11 @@ def build(self) -> BuildArtifacts: manager = HathorManager( reactor, + settings=settings, network=self._network, pubsub=pubsub, consensus_algorithm=consensus_algorithm, + daa=daa, peer_id=peer_id, tx_storage=tx_storage, p2p_manager=p2p_manager, @@ -431,10 +438,26 @@ def 
_get_or_create_bit_signaling_service(self, tx_storage: TransactionStorage) - def _get_or_create_verification_service(self) -> VerificationService: if self._verification_service is None: - self._verification_service = VerificationService() + verifiers = self._get_or_create_vertex_verifiers() + self._verification_service = VerificationService(verifiers=verifiers) return self._verification_service + def _get_or_create_vertex_verifiers(self) -> VertexVerifiers: + if self._vertex_verifiers is None: + settings = self._get_or_create_settings() + daa = self._get_or_create_daa() + self._vertex_verifiers = VertexVerifiers.create(settings=settings, daa=daa) + + return self._vertex_verifiers + + def _get_or_create_daa(self) -> DifficultyAdjustmentAlgorithm: + if self._daa is None: + settings = self._get_or_create_settings() + self._daa = DifficultyAdjustmentAlgorithm(settings=settings) + + return self._daa + def use_memory(self) -> 'Builder': self.check_if_can_modify() self._storage_type = StorageType.MEMORY @@ -532,6 +555,16 @@ def set_verification_service(self, verification_service: VerificationService) -> self._verification_service = verification_service return self + def set_vertex_verifiers(self, vertex_verifiers: VertexVerifiers) -> 'Builder': + self.check_if_can_modify() + self._vertex_verifiers = vertex_verifiers + return self + + def set_daa(self, daa: DifficultyAdjustmentAlgorithm) -> 'Builder': + self.check_if_can_modify() + self._daa = daa + return self + def set_reactor(self, reactor: Reactor) -> 'Builder': self.check_if_can_modify() self._reactor = reactor diff --git a/hathor/builder/cli_builder.py b/hathor/builder/cli_builder.py index 1f2e6f7b1..f778a4fb2 100644 --- a/hathor/builder/cli_builder.py +++ b/hathor/builder/cli_builder.py @@ -23,6 +23,7 @@ from hathor.cli.run_node import RunNodeArgs from hathor.consensus import ConsensusAlgorithm +from hathor.daa import DifficultyAdjustmentAlgorithm from hathor.event import EventManager from hathor.exception import 
BuilderError from hathor.feature_activation.bit_signaling_service import BitSignalingService @@ -35,7 +36,7 @@ from hathor.pubsub import PubSubManager from hathor.stratum import StratumFactory from hathor.util import Random, Reactor -from hathor.verification.verification_service import VerificationService +from hathor.verification.verification_service import VerificationService, VertexVerifiers from hathor.wallet import BaseWallet, HDWallet, Wallet logger = get_logger() @@ -203,7 +204,10 @@ def create_manager(self, reactor: Reactor) -> HathorManager: not_support_features=self._args.signal_not_support ) - verification_service = VerificationService() + daa = DifficultyAdjustmentAlgorithm(settings=settings) + + vertex_verifiers = VertexVerifiers.create(settings=settings, daa=daa) + verification_service = VerificationService(verifiers=vertex_verifiers) p2p_manager = ConnectionsManager( reactor, @@ -220,10 +224,12 @@ def create_manager(self, reactor: Reactor) -> HathorManager: self.manager = HathorManager( reactor, + settings=settings, network=network, hostname=hostname, pubsub=pubsub, consensus_algorithm=consensus_algorithm, + daa=daa, peer_id=peer_id, tx_storage=tx_storage, p2p_manager=p2p_manager, diff --git a/hathor/cli/events_simulator/scenario.py b/hathor/cli/events_simulator/scenario.py index ea8f16528..fd96a6e2d 100644 --- a/hathor/cli/events_simulator/scenario.py +++ b/hathor/cli/events_simulator/scenario.py @@ -50,7 +50,6 @@ def simulate_single_chain_one_block(simulator: 'Simulator', manager: 'HathorMana def simulate_single_chain_blocks_and_transactions(simulator: 'Simulator', manager: 'HathorManager') -> None: - from hathor import daa from hathor.conf.get_settings import get_settings from tests.utils import add_new_blocks, gen_new_tx @@ -62,13 +61,13 @@ def simulate_single_chain_blocks_and_transactions(simulator: 'Simulator', manage simulator.run(60) tx = gen_new_tx(manager, address, 1000) - tx.weight = daa.minimum_tx_weight(tx) + tx.weight = 
manager.daa.minimum_tx_weight(tx) tx.update_hash() assert manager.propagate_tx(tx, fails_silently=False) simulator.run(60) tx = gen_new_tx(manager, address, 2000) - tx.weight = daa.minimum_tx_weight(tx) + tx.weight = manager.daa.minimum_tx_weight(tx) tx.update_hash() assert manager.propagate_tx(tx, fails_silently=False) simulator.run(60) diff --git a/hathor/cli/mining.py b/hathor/cli/mining.py index 9a373be90..7b723fc44 100644 --- a/hathor/cli/mining.py +++ b/hathor/cli/mining.py @@ -24,6 +24,10 @@ import requests +from hathor.conf.get_settings import get_settings +from hathor.daa import DifficultyAdjustmentAlgorithm +from hathor.verification.block_verifier import BlockVerifier + _SLEEP_ON_ERROR_SECONDS = 5 _MAX_CONN_RETRIES = math.inf @@ -134,7 +138,10 @@ def execute(args: Namespace) -> None: block.nonce, block.weight)) try: - block.verify_without_storage() + settings = get_settings() + daa = DifficultyAdjustmentAlgorithm(settings=settings) + verifier = BlockVerifier(settings=settings, daa=daa) + verifier.verify_without_storage(block) except HathorError: print('[{}] ERROR: Block has not been pushed because it is not valid.'.format(datetime.datetime.now())) else: diff --git a/hathor/daa.py b/hathor/daa.py index b812d1a39..ece55766a 100644 --- a/hathor/daa.py +++ b/hathor/daa.py @@ -25,7 +25,7 @@ from structlog import get_logger -from hathor.conf import HathorSettings +from hathor.conf.settings import HathorSettings from hathor.profiler import get_cpu_profiler from hathor.util import iwindows @@ -33,12 +33,8 @@ from hathor.transaction import Block, Transaction logger = get_logger() -settings = HathorSettings() cpu = get_cpu_profiler() -MIN_BLOCK_WEIGHT = settings.MIN_BLOCK_WEIGHT -AVG_TIME_BETWEEN_BLOCKS = settings.AVG_TIME_BETWEEN_BLOCKS - class TestMode(IntFlag): __test__ = False @@ -58,173 +54,175 @@ def _set_test_mode(mode: TestMode) -> None: TEST_MODE = mode -@cpu.profiler(key=lambda block: 'calculate_block_difficulty!{}'.format(block.hash.hex())) -def 
calculate_block_difficulty(block: 'Block') -> float: - """ Calculate block weight according to the ascendents of `block`, using calculate_next_weight.""" - if TEST_MODE & TestMode.TEST_BLOCK_WEIGHT: - return 1.0 - - if block.is_genesis: - return MIN_BLOCK_WEIGHT - - return calculate_next_weight(block.get_block_parent(), block.timestamp) - - -def calculate_next_weight(parent_block: 'Block', timestamp: int) -> float: - """ Calculate the next block weight, aka DAA/difficulty adjustment algorithm. - - The algorithm used is described in [RFC 22](https://gitlab.com/HathorNetwork/rfcs/merge_requests/22). - - The weight must not be less than `MIN_BLOCK_WEIGHT`. - """ - if TEST_MODE & TestMode.TEST_BLOCK_WEIGHT: - return 1.0 - - from hathor.transaction import sum_weights - - root = parent_block - N = min(2 * settings.BLOCK_DIFFICULTY_N_BLOCKS, parent_block.get_height() - 1) - K = N // 2 - T = AVG_TIME_BETWEEN_BLOCKS - S = 5 - if N < 10: - return MIN_BLOCK_WEIGHT - - blocks: list['Block'] = [] - while len(blocks) < N + 1: - blocks.append(root) - root = root.get_block_parent() - assert root is not None - - # TODO: revise if this assertion can be safely removed - assert blocks == sorted(blocks, key=lambda tx: -tx.timestamp) - blocks = list(reversed(blocks)) - - assert len(blocks) == N + 1 - solvetimes, weights = zip(*( - (block.timestamp - prev_block.timestamp, block.weight) - for prev_block, block in iwindows(blocks, 2) - )) - assert len(solvetimes) == len(weights) == N, f'got {len(solvetimes)}, {len(weights)} expected {N}' - - sum_solvetimes = 0.0 - logsum_weights = 0.0 - - prefix_sum_solvetimes = [0] - for st in solvetimes: - prefix_sum_solvetimes.append(prefix_sum_solvetimes[-1] + st) - - # Loop through N most recent blocks. N is most recently solved block. 
- for i in range(K, N): - solvetime = solvetimes[i] - weight = weights[i] - x = (prefix_sum_solvetimes[i + 1] - prefix_sum_solvetimes[i - K]) / K - ki = K * (x - T)**2 / (2 * T * T) - ki = max(1, ki / S) - sum_solvetimes += ki * solvetime - logsum_weights = sum_weights(logsum_weights, log(ki, 2) + weight) - - weight = logsum_weights - log(sum_solvetimes, 2) + log(T, 2) - - # Apply weight decay - weight -= get_weight_decay_amount(timestamp - parent_block.timestamp) - - # Apply minimum weight - if weight < MIN_BLOCK_WEIGHT: - weight = MIN_BLOCK_WEIGHT - - return weight - - -def get_weight_decay_amount(distance: int) -> float: - """Return the amount to be reduced in the weight of the block.""" - if not settings.WEIGHT_DECAY_ENABLED: - return 0.0 - if distance < settings.WEIGHT_DECAY_ACTIVATE_DISTANCE: - return 0.0 - - dt = distance - settings.WEIGHT_DECAY_ACTIVATE_DISTANCE - - # Calculate the number of windows. - n_windows = 1 + (dt // settings.WEIGHT_DECAY_WINDOW_SIZE) - return n_windows * settings.WEIGHT_DECAY_AMOUNT - +class DifficultyAdjustmentAlgorithm: -def minimum_tx_weight(tx: 'Transaction') -> float: - """ Returns the minimum weight for the param tx - The minimum is calculated by the following function: + def __init__(self, *, settings: HathorSettings) -> None: + self._settings = settings + self.AVG_TIME_BETWEEN_BLOCKS = self._settings.AVG_TIME_BETWEEN_BLOCKS + self.MIN_BLOCK_WEIGHT = self._settings.MIN_BLOCK_WEIGHT - w = alpha * log(size, 2) + 4.0 + 4.0 - ---------------- - 1 + k / amount + @cpu.profiler(key=lambda _, block: 'calculate_block_difficulty!{}'.format(block.hash.hex())) + def calculate_block_difficulty(self, block: 'Block') -> float: + """ Calculate block weight according to the ascendents of `block`, using calculate_next_weight.""" + if TEST_MODE & TestMode.TEST_BLOCK_WEIGHT: + return 1.0 - :param tx: tx to calculate the minimum weight - :type tx: :py:class:`hathor.transaction.transaction.Transaction` + if block.is_genesis: + return 
self.MIN_BLOCK_WEIGHT - :return: minimum weight for the tx - :rtype: float - """ - # In test mode we don't validate the minimum weight for tx - # We do this to allow generating many txs for testing - if TEST_MODE & TestMode.TEST_TX_WEIGHT: - return 1.0 + return self.calculate_next_weight(block.get_block_parent(), block.timestamp) - if tx.is_genesis: - return settings.MIN_TX_WEIGHT + def calculate_next_weight(self, parent_block: 'Block', timestamp: int) -> float: + """ Calculate the next block weight, aka DAA/difficulty adjustment algorithm. - tx_size = len(tx.get_struct()) + The algorithm used is described in [RFC 22](https://gitlab.com/HathorNetwork/rfcs/merge_requests/22). - # We need to take into consideration the decimal places because it is inside the amount. - # For instance, if one wants to transfer 20 HTRs, the amount will be 2000. - # Max below is preventing division by 0 when handling authority methods that have no outputs - amount = max(1, tx.sum_outputs) / (10 ** settings.DECIMAL_PLACES) - weight = ( - + settings.MIN_TX_WEIGHT_COEFFICIENT * log(tx_size, 2) - + 4 / (1 + settings.MIN_TX_WEIGHT_K / amount) + 4 - ) + The weight must not be less than `MIN_BLOCK_WEIGHT`. 
+ """ + if TEST_MODE & TestMode.TEST_BLOCK_WEIGHT: + return 1.0 + + from hathor.transaction import sum_weights + + root = parent_block + N = min(2 * self._settings.BLOCK_DIFFICULTY_N_BLOCKS, parent_block.get_height() - 1) + K = N // 2 + T = self.AVG_TIME_BETWEEN_BLOCKS + S = 5 + if N < 10: + return self.MIN_BLOCK_WEIGHT + + blocks: list['Block'] = [] + while len(blocks) < N + 1: + blocks.append(root) + root = root.get_block_parent() + assert root is not None - # Make sure the calculated weight is at least the minimum - weight = max(weight, settings.MIN_TX_WEIGHT) + # TODO: revise if this assertion can be safely removed + assert blocks == sorted(blocks, key=lambda tx: -tx.timestamp) + blocks = list(reversed(blocks)) - return weight + assert len(blocks) == N + 1 + solvetimes, weights = zip(*( + (block.timestamp - prev_block.timestamp, block.weight) + for prev_block, block in iwindows(blocks, 2) + )) + assert len(solvetimes) == len(weights) == N, f'got {len(solvetimes)}, {len(weights)} expected {N}' + + sum_solvetimes = 0.0 + logsum_weights = 0.0 + + prefix_sum_solvetimes = [0] + for st in solvetimes: + prefix_sum_solvetimes.append(prefix_sum_solvetimes[-1] + st) + # Loop through N most recent blocks. N is most recently solved block. 
+ for i in range(K, N): + solvetime = solvetimes[i] + weight = weights[i] + x = (prefix_sum_solvetimes[i + 1] - prefix_sum_solvetimes[i - K]) / K + ki = K * (x - T)**2 / (2 * T * T) + ki = max(1, ki / S) + sum_solvetimes += ki * solvetime + logsum_weights = sum_weights(logsum_weights, log(ki, 2) + weight) -def get_tokens_issued_per_block(height: int) -> int: - """Return the number of tokens issued (aka reward) per block of a given height.""" - if settings.BLOCKS_PER_HALVING is None: - assert settings.MINIMUM_TOKENS_PER_BLOCK == settings.INITIAL_TOKENS_PER_BLOCK - return settings.MINIMUM_TOKENS_PER_BLOCK + weight = logsum_weights - log(sum_solvetimes, 2) + log(T, 2) - number_of_halvings = (height - 1) // settings.BLOCKS_PER_HALVING - number_of_halvings = max(0, number_of_halvings) + # Apply weight decay + weight -= self.get_weight_decay_amount(timestamp - parent_block.timestamp) - if number_of_halvings > settings.MAXIMUM_NUMBER_OF_HALVINGS: - return settings.MINIMUM_TOKENS_PER_BLOCK + # Apply minimum weight + if weight < self.MIN_BLOCK_WEIGHT: + weight = self.MIN_BLOCK_WEIGHT - amount = settings.INITIAL_TOKENS_PER_BLOCK // (2**number_of_halvings) - amount = max(amount, settings.MINIMUM_TOKENS_PER_BLOCK) - return amount + return weight + def get_weight_decay_amount(self, distance: int) -> float: + """Return the amount to be reduced in the weight of the block.""" + if not self._settings.WEIGHT_DECAY_ENABLED: + return 0.0 + if distance < self._settings.WEIGHT_DECAY_ACTIVATE_DISTANCE: + return 0.0 -def get_mined_tokens(height: int) -> int: - """Return the number of tokens mined in total at height - """ - assert settings.BLOCKS_PER_HALVING is not None - number_of_halvings = (height - 1) // settings.BLOCKS_PER_HALVING - number_of_halvings = max(0, number_of_halvings) + dt = distance - self._settings.WEIGHT_DECAY_ACTIVATE_DISTANCE - blocks_in_this_halving = height - number_of_halvings * settings.BLOCKS_PER_HALVING + # Calculate the number of windows. 
+ n_windows = 1 + (dt // self._settings.WEIGHT_DECAY_WINDOW_SIZE) + return n_windows * self._settings.WEIGHT_DECAY_AMOUNT - tokens_per_block = settings.INITIAL_TOKENS_PER_BLOCK - mined_tokens = 0 + def minimum_tx_weight(self, tx: 'Transaction') -> float: + """ Returns the minimum weight for the param tx + The minimum is calculated by the following function: + + w = alpha * log(size, 2) + 4.0 + 4.0 + ---------------- + 1 + k / amount + + :param tx: tx to calculate the minimum weight + :type tx: :py:class:`hathor.transaction.transaction.Transaction` + + :return: minimum weight for the tx + :rtype: float + """ + # In test mode we don't validate the minimum weight for tx + # We do this to allow generating many txs for testing + if TEST_MODE & TestMode.TEST_TX_WEIGHT: + return 1.0 + + if tx.is_genesis: + return self._settings.MIN_TX_WEIGHT + + tx_size = len(tx.get_struct()) + + # We need to take into consideration the decimal places because it is inside the amount. + # For instance, if one wants to transfer 20 HTRs, the amount will be 2000. 
+ # Max below is preventing division by 0 when handling authority methods that have no outputs + amount = max(1, tx.sum_outputs) / (10 ** self._settings.DECIMAL_PLACES) + weight = ( + + self._settings.MIN_TX_WEIGHT_COEFFICIENT * log(tx_size, 2) + + 4 / (1 + self._settings.MIN_TX_WEIGHT_K / amount) + 4 + ) + + # Make sure the calculated weight is at least the minimum + weight = max(weight, self._settings.MIN_TX_WEIGHT) + + return weight + + def get_tokens_issued_per_block(self, height: int) -> int: + """Return the number of tokens issued (aka reward) per block of a given height.""" + if self._settings.BLOCKS_PER_HALVING is None: + assert self._settings.MINIMUM_TOKENS_PER_BLOCK == self._settings.INITIAL_TOKENS_PER_BLOCK + return self._settings.MINIMUM_TOKENS_PER_BLOCK + + number_of_halvings = (height - 1) // self._settings.BLOCKS_PER_HALVING + number_of_halvings = max(0, number_of_halvings) + + if number_of_halvings > self._settings.MAXIMUM_NUMBER_OF_HALVINGS: + return self._settings.MINIMUM_TOKENS_PER_BLOCK + + amount = self._settings.INITIAL_TOKENS_PER_BLOCK // (2**number_of_halvings) + amount = max(amount, self._settings.MINIMUM_TOKENS_PER_BLOCK) + return amount + + def get_mined_tokens(self, height: int) -> int: + """Return the number of tokens mined in total at height + """ + assert self._settings.BLOCKS_PER_HALVING is not None + number_of_halvings = (height - 1) // self._settings.BLOCKS_PER_HALVING + number_of_halvings = max(0, number_of_halvings) + + blocks_in_this_halving = height - number_of_halvings * self._settings.BLOCKS_PER_HALVING + + tokens_per_block = self._settings.INITIAL_TOKENS_PER_BLOCK + mined_tokens = 0 - # Sum the past halvings - for _ in range(number_of_halvings): - mined_tokens += settings.BLOCKS_PER_HALVING * tokens_per_block - tokens_per_block //= 2 - tokens_per_block = max(tokens_per_block, settings.MINIMUM_TOKENS_PER_BLOCK) + # Sum the past halvings + for _ in range(number_of_halvings): + mined_tokens += self._settings.BLOCKS_PER_HALVING 
* tokens_per_block + tokens_per_block //= 2 + tokens_per_block = max(tokens_per_block, self._settings.MINIMUM_TOKENS_PER_BLOCK) - # Sum the blocks in the current halving - mined_tokens += blocks_in_this_halving * tokens_per_block + # Sum the blocks in the current halving + mined_tokens += blocks_in_this_halving * tokens_per_block - return mined_tokens + return mined_tokens diff --git a/hathor/manager.py b/hathor/manager.py index 99f46cc5e..83137bd05 100644 --- a/hathor/manager.py +++ b/hathor/manager.py @@ -25,10 +25,10 @@ from twisted.internet.task import LoopingCall from twisted.python.threadpool import ThreadPool -from hathor import daa from hathor.checkpoint import Checkpoint -from hathor.conf import HathorSettings +from hathor.conf.settings import HathorSettings from hathor.consensus import ConsensusAlgorithm +from hathor.daa import DifficultyAdjustmentAlgorithm from hathor.event.event_manager import EventManager from hathor.exception import ( DoubleSpendingError, @@ -60,18 +60,10 @@ from hathor.verification.verification_service import VerificationService from hathor.wallet import BaseWallet -settings = HathorSettings() logger = get_logger() cpu = get_cpu_profiler() -DEFAULT_CAPABILITIES = [ - settings.CAPABILITY_WHITELIST, - settings.CAPABILITY_SYNC_VERSION, - settings.CAPABILITY_GET_BEST_BLOCKCHAIN -] - - class HathorManager: """ HathorManager manages the node with the help of other specialized classes. 
@@ -95,8 +87,10 @@ class UnhealthinessReason(str, Enum): def __init__(self, reactor: Reactor, *, + settings: HathorSettings, pubsub: PubSubManager, consensus_algorithm: ConsensusAlgorithm, + daa: DifficultyAdjustmentAlgorithm, peer_id: PeerId, tx_storage: TransactionStorage, p2p_manager: ConnectionsManager, @@ -130,6 +124,8 @@ def __init__(self, 'Either enable it, or use the reset-event-queue CLI command to remove all event-related data' ) + self.settings = settings + self.daa = daa self._cmd_path: Optional[str] = None self.log = logger.new() @@ -223,7 +219,7 @@ def __init__(self, if capabilities is not None: self.capabilities = capabilities else: - self.capabilities = DEFAULT_CAPABILITIES + self.capabilities = self.get_default_capabilities() # This is included in some logs to provide more context self.environment_info = environment_info @@ -233,6 +229,13 @@ def __init__(self, self.lc_check_sync_state.clock = self.reactor self.lc_check_sync_state_interval = self.CHECK_SYNC_STATE_INTERVAL + def get_default_capabilities(self) -> list[str]: + return [ + self.settings.CAPABILITY_WHITELIST, + self.settings.CAPABILITY_SYNC_VERSION, + self.settings.CAPABILITY_GET_BEST_BLOCKCHAIN + ] + def start(self) -> None: """ A factory must be started only once. And it is usually automatically started. 
""" @@ -443,7 +446,7 @@ def _initialize_components_full_verification(self) -> None: # It's safe to skip block weight verification during initialization because # we trust the difficulty stored in metadata skip_block_weight_verification = True - if block_count % settings.VERIFY_WEIGHT_EVERY_N_BLOCKS == 0: + if block_count % self.settings.VERIFY_WEIGHT_EVERY_N_BLOCKS == 0: skip_block_weight_verification = False try: @@ -628,14 +631,14 @@ def _verify_soft_voided_txs(self) -> None: soft_voided_meta = soft_voided_tx.get_metadata() voided_set = soft_voided_meta.voided_by or set() # If the tx is not marked as soft voided, then we can't continue the initialization - if settings.SOFT_VOIDED_ID not in voided_set: + if self.settings.SOFT_VOIDED_ID not in voided_set: self.log.error( 'Error initializing node. Your database is not compatible with the current version of the' ' full node. You must use the latest available snapshot or sync from the beginning.' ) sys.exit(-1) - assert {soft_voided_id, settings.SOFT_VOIDED_ID}.issubset(voided_set) + assert {soft_voided_id, self.settings.SOFT_VOIDED_ID}.issubset(voided_set) def _verify_checkpoints(self) -> None: """ Method to verify if all checkpoints that exist in the database have the correct hash and are winners. 
@@ -774,7 +777,7 @@ def make_block_template(self, parent_block_hash: VertexId, timestamp: Optional[i """ parent_block = self.tx_storage.get_transaction(parent_block_hash) assert isinstance(parent_block, Block) - parent_txs = self.generate_parent_txs(parent_block.timestamp + settings.MAX_DISTANCE_BETWEEN_BLOCKS) + parent_txs = self.generate_parent_txs(parent_block.timestamp + self.settings.MAX_DISTANCE_BETWEEN_BLOCKS) if timestamp is None: current_timestamp = int(max(self.tx_storage.latest_timestamp, self.reactor.seconds())) else: @@ -810,7 +813,7 @@ def _make_block_template(self, parent_block: Block, parent_txs: 'ParentTxs', cur timestamp_abs_min = parent_block.timestamp + 1 # and absolute maximum limited by max time between blocks if not parent_block.is_genesis: - timestamp_abs_max = parent_block.timestamp + settings.MAX_DISTANCE_BETWEEN_BLOCKS - 1 + timestamp_abs_max = parent_block.timestamp + self.settings.MAX_DISTANCE_BETWEEN_BLOCKS - 1 else: timestamp_abs_max = 0xffffffff assert timestamp_abs_max > timestamp_abs_min @@ -819,12 +822,12 @@ def _make_block_template(self, parent_block: Block, parent_txs: 'ParentTxs', cur timestamp_min = max(timestamp_abs_min, parent_txs.max_timestamp + 1) assert timestamp_min <= timestamp_abs_max # when we have weight decay, the max timestamp will be when the next decay happens - if with_weight_decay and settings.WEIGHT_DECAY_ENABLED: + if with_weight_decay and self.settings.WEIGHT_DECAY_ENABLED: # we either have passed the first decay or not, the range will vary depending on that - if timestamp_min > timestamp_abs_min + settings.WEIGHT_DECAY_ACTIVATE_DISTANCE: - timestamp_max_decay = timestamp_min + settings.WEIGHT_DECAY_WINDOW_SIZE + if timestamp_min > timestamp_abs_min + self.settings.WEIGHT_DECAY_ACTIVATE_DISTANCE: + timestamp_max_decay = timestamp_min + self.settings.WEIGHT_DECAY_WINDOW_SIZE else: - timestamp_max_decay = timestamp_abs_min + settings.WEIGHT_DECAY_ACTIVATE_DISTANCE + timestamp_max_decay = timestamp_abs_min + 
self.settings.WEIGHT_DECAY_ACTIVATE_DISTANCE timestamp_max = min(timestamp_abs_max, timestamp_max_decay) else: timestamp_max = timestamp_abs_max @@ -833,8 +836,11 @@ def _make_block_template(self, parent_block: Block, parent_txs: 'ParentTxs', cur # this is the min weight to cause an increase of twice the WEIGHT_TOL, we make sure to generate a template with # at least this weight (note that the user of the API can set its own weight, the block sumit API will also # protect agains a weight that is too small but using WEIGHT_TOL instead of 2*WEIGHT_TOL) - min_significant_weight = calculate_min_significant_weight(parent_block_metadata.score, 2 * settings.WEIGHT_TOL) - weight = max(daa.calculate_next_weight(parent_block, timestamp), min_significant_weight) + min_significant_weight = calculate_min_significant_weight( + parent_block_metadata.score, + 2 * self.settings.WEIGHT_TOL + ) + weight = max(self.daa.calculate_next_weight(parent_block, timestamp), min_significant_weight) height = parent_block.get_height() + 1 parents = [parent_block.hash] + parent_txs.must_include parents_any = parent_txs.can_include @@ -848,7 +854,7 @@ def _make_block_template(self, parent_block: Block, parent_txs: 'ParentTxs', cur assert len(parents_any) == 0, 'Extra parents to choose from that cannot be chosen' return BlockTemplate( versions={TxVersion.REGULAR_BLOCK.value, TxVersion.MERGE_MINED_BLOCK.value}, - reward=daa.get_tokens_issued_per_block(height), + reward=self.daa.get_tokens_issued_per_block(height), weight=weight, timestamp_now=current_timestamp, timestamp_min=timestamp_min, @@ -885,7 +891,7 @@ def generate_mining_block(self, timestamp: Optional[int] = None, def get_tokens_issued_per_block(self, height: int) -> int: """Return the number of tokens issued (aka reward) per block of a given height.""" - return daa.get_tokens_issued_per_block(height) + return self.daa.get_tokens_issued_per_block(height) def submit_block(self, blk: Block, fails_silently: bool = True) -> bool: """Used by 
submit block from all mining APIs. @@ -898,15 +904,21 @@ def submit_block(self, blk: Block, fails_silently: bool = True) -> bool: parent_block = self.tx_storage.get_transaction(parent_hash) parent_block_metadata = parent_block.get_metadata() # this is the smallest weight that won't cause the score to increase, anything equal or smaller is bad - min_insignificant_weight = calculate_min_significant_weight(parent_block_metadata.score, settings.WEIGHT_TOL) + min_insignificant_weight = calculate_min_significant_weight( + parent_block_metadata.score, + self.settings.WEIGHT_TOL + ) if blk.weight <= min_insignificant_weight: self.log.warn('submit_block(): insignificant weight? accepted anyway', blk=blk.hash_hex, weight=blk.weight) return self.propagate_tx(blk, fails_silently=fails_silently) def push_tx(self, tx: Transaction, allow_non_standard_script: bool = False, - max_output_script_size: int = settings.PUSHTX_MAX_OUTPUT_SCRIPT_SIZE) -> None: + max_output_script_size: int | None = None) -> None: """Used by all APIs that accept a new transaction (like push_tx) """ + if max_output_script_size is None: + max_output_script_size = self.settings.PUSHTX_MAX_OUTPUT_SCRIPT_SIZE + is_double_spending = tx.is_double_spending() if is_double_spending: raise DoubleSpendingError('Invalid transaction. 
At least one of your inputs has already been spent.') @@ -968,7 +980,7 @@ def on_new_tx(self, tx: BaseTransaction, *, conn: Optional[HathorProtocol] = Non self.tx_storage.compare_bytes_with_local_tx(tx) already_exists = True - if tx.timestamp - self.reactor.seconds() > settings.MAX_FUTURE_TIMESTAMP_ALLOWED: + if tx.timestamp - self.reactor.seconds() > self.settings.MAX_FUTURE_TIMESTAMP_ALLOWED: if not fails_silently: raise InvalidNewTransaction('Ignoring transaction in the future {} (timestamp={})'.format( tx.hash_hex, tx.timestamp)) @@ -1117,7 +1129,7 @@ def tx_fully_validated(self, tx: BaseTransaction, *, quiet: bool) -> None: def _log_feature_states(self, vertex: BaseTransaction) -> None: """Log features states for a block. Used as part of the Feature Activation Phased Testing.""" - if not settings.FEATURE_ACTIVATION.enable_usage or not isinstance(vertex, Block): + if not self.settings.FEATURE_ACTIVATION.enable_usage or not isinstance(vertex, Block): return feature_descriptions = self._feature_service.get_bits_description(block=vertex) @@ -1153,10 +1165,10 @@ def listen(self, description: str, use_ssl: Optional[bool] = None) -> None: self.my_peer.entrypoints.append(address) def has_sync_version_capability(self) -> bool: - return settings.CAPABILITY_SYNC_VERSION in self.capabilities + return self.settings.CAPABILITY_SYNC_VERSION in self.capabilities def add_peer_to_whitelist(self, peer_id): - if not settings.ENABLE_PEER_WHITELIST: + if not self.settings.ENABLE_PEER_WHITELIST: return if peer_id in self.peers_whitelist: @@ -1165,7 +1177,7 @@ def add_peer_to_whitelist(self, peer_id): self.peers_whitelist.append(peer_id) def remove_peer_from_whitelist_and_disconnect(self, peer_id: str) -> None: - if not settings.ENABLE_PEER_WHITELIST: + if not self.settings.ENABLE_PEER_WHITELIST: return if peer_id in self.peers_whitelist: @@ -1179,7 +1191,9 @@ def has_recent_activity(self) -> bool: # We use the avg time between blocks as a basis to know how much time we should use to 
consider the fullnode # as not synced. - maximum_timestamp_delta = settings.P2P_RECENT_ACTIVITY_THRESHOLD_MULTIPLIER * settings.AVG_TIME_BETWEEN_BLOCKS + maximum_timestamp_delta = ( + self.settings.P2P_RECENT_ACTIVITY_THRESHOLD_MULTIPLIER * self.settings.AVG_TIME_BETWEEN_BLOCKS + ) if current_timestamp - latest_blockchain_timestamp > maximum_timestamp_delta: return False diff --git a/hathor/p2p/resources/mining_info.py b/hathor/p2p/resources/mining_info.py index 4aae45616..8263ee273 100644 --- a/hathor/p2p/resources/mining_info.py +++ b/hathor/p2p/resources/mining_info.py @@ -17,7 +17,6 @@ from hathor.api_util import Resource, set_cors from hathor.cli.openapi_files.register import register_resource from hathor.conf.get_settings import get_settings -from hathor.daa import get_mined_tokens from hathor.difficulty import Weight from hathor.util import json_dumpb @@ -57,7 +56,7 @@ def render_GET(self, request): parent = block.get_block_parent() hashrate = 2**(parent.weight - log(30, 2)) - mined_tokens = get_mined_tokens(height) + mined_tokens = self.manager.daa.get_mined_tokens(height) data = { 'hashrate': hashrate, diff --git a/hathor/simulator/simulator.py b/hathor/simulator/simulator.py index cae937e03..c9340486f 100644 --- a/hathor/simulator/simulator.py +++ b/hathor/simulator/simulator.py @@ -22,14 +22,21 @@ from hathor.builder import BuildArtifacts, Builder from hathor.conf.get_settings import get_settings -from hathor.daa import TestMode, _set_test_mode +from hathor.daa import DifficultyAdjustmentAlgorithm, TestMode, _set_test_mode from hathor.manager import HathorManager from hathor.p2p.peer_id import PeerId from hathor.simulator.clock import HeapClock, MemoryReactorHeapClock from hathor.simulator.miner.geometric_miner import GeometricMiner from hathor.simulator.tx_generator import RandomTransactionGenerator +from hathor.simulator.verification import ( + SimulatorBlockVerifier, + SimulatorMergeMinedBlockVerifier, + SimulatorTokenCreationTransactionVerifier, + 
SimulatorTransactionVerifier, +) from hathor.transaction.genesis import _get_genesis_transactions_unsafe from hathor.util import Random +from hathor.verification.verification_service import VertexVerifiers from hathor.wallet import HDWallet if TYPE_CHECKING: @@ -41,6 +48,7 @@ DEFAULT_STEP_INTERVAL: float = 0.25 DEFAULT_STATUS_INTERVAL: float = 60.0 +SIMULATOR_AVG_TIME_BETWEEN_BLOCKS: int = 64 class Simulator: @@ -53,45 +61,28 @@ def _apply_patches(cls): Patches: - - disable pow verification - disable Transaction.resolve method - set DAA test-mode to DISABLED (will actually run the pow function, that won't actually verify the pow) - - override AVG_TIME_BETWEEN_BLOCKS to 64 """ from hathor.transaction import BaseTransaction - def verify_pow(self: BaseTransaction, *args: Any, **kwargs: Any) -> None: - assert self.hash is not None - logger.new().debug('Skipping BaseTransaction.verify_pow() for simulator') - def resolve(self: BaseTransaction, update_time: bool = True) -> bool: self.update_hash() logger.new().debug('Skipping BaseTransaction.resolve() for simulator') return True - cls._original_verify_pow = BaseTransaction.verify_pow - BaseTransaction.verify_pow = verify_pow - cls._original_resolve = BaseTransaction.resolve BaseTransaction.resolve = resolve _set_test_mode(TestMode.DISABLED) - from hathor import daa - cls._original_avg_time_between_blocks = daa.AVG_TIME_BETWEEN_BLOCKS - daa.AVG_TIME_BETWEEN_BLOCKS = 64 - @classmethod def _remove_patches(cls): """ Remove the patches previously applied. 
""" from hathor.transaction import BaseTransaction - BaseTransaction.verify_pow = cls._original_verify_pow BaseTransaction.resolve = cls._original_resolve - from hathor import daa - daa.AVG_TIME_BETWEEN_BLOCKS = cls._original_avg_time_between_blocks - @classmethod def _patches_rc_increment(cls): """ This is used by when starting instances of Simulator to determine when to run _apply_patches""" @@ -116,7 +107,7 @@ def __init__(self, seed: Optional[int] = None): seed = secrets.randbits(64) self.seed = seed self.rng = Random(self.seed) - self.settings = get_settings() + self.settings = get_settings()._replace(AVG_TIME_BETWEEN_BLOCKS=SIMULATOR_AVG_TIME_BETWEEN_BLOCKS) self._network = 'testnet' self._clock = MemoryReactorHeapClock() self._peers: OrderedDict[str, HathorManager] = OrderedDict() @@ -150,7 +141,8 @@ def get_default_builder(self) -> Builder: .enable_full_verification() \ .enable_sync_v1() \ .enable_sync_v2() \ - .use_memory() + .use_memory() \ + .set_settings(self.settings) def create_peer(self, builder: Optional[Builder] = None) -> HathorManager: """ @@ -171,10 +163,20 @@ def create_artifacts(self, builder: Optional[Builder] = None) -> BuildArtifacts: wallet = HDWallet(gap_limit=2) wallet._manually_initialize() + daa = DifficultyAdjustmentAlgorithm(settings=self.settings) + vertex_verifiers = VertexVerifiers( + block=SimulatorBlockVerifier(settings=self.settings, daa=daa), + merge_mined_block=SimulatorMergeMinedBlockVerifier(settings=self.settings, daa=daa), + tx=SimulatorTransactionVerifier(settings=self.settings, daa=daa), + token_creation_tx=SimulatorTokenCreationTransactionVerifier(settings=self.settings, daa=daa), + ) + artifacts = builder \ .set_reactor(self._clock) \ .set_rng(Random(self.rng.getrandbits(64))) \ .set_wallet(wallet) \ + .set_vertex_verifiers(vertex_verifiers) \ + .set_daa(daa) \ .build() artifacts.manager.start() diff --git a/hathor/simulator/tx_generator.py b/hathor/simulator/tx_generator.py index 6bb76c1a8..3bd51ef1c 100644 --- 
a/hathor/simulator/tx_generator.py +++ b/hathor/simulator/tx_generator.py @@ -17,7 +17,6 @@ from structlog import get_logger -from hathor import daa from hathor.conf.get_settings import get_settings from hathor.transaction.exceptions import RewardLocked from hathor.util import Random @@ -128,7 +127,7 @@ def new_tx_step1(self): self.delayedcall = self.clock.callLater(0, self.schedule_next_transaction) return - tx.weight = daa.minimum_tx_weight(tx) + tx.weight = self.manager.daa.minimum_tx_weight(tx) tx.update_hash() geometric_p = 2**(-tx.weight) diff --git a/hathor/simulator/verification.py b/hathor/simulator/verification.py new file mode 100644 index 000000000..849a9a0e9 --- /dev/null +++ b/hathor/simulator/verification.py @@ -0,0 +1,54 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from typing import Optional + +from structlog import get_logger + +from hathor.transaction import BaseTransaction +from hathor.verification.block_verifier import BlockVerifier +from hathor.verification.merge_mined_block_verifier import MergeMinedBlockVerifier +from hathor.verification.token_creation_transaction_verifier import TokenCreationTransactionVerifier +from hathor.verification.transaction_verifier import TransactionVerifier + +logger = get_logger() + + +def verify_pow(vertex: BaseTransaction) -> None: + assert vertex.hash is not None + logger.new().debug('Skipping BaseTransaction.verify_pow() for simulator') + + +class SimulatorBlockVerifier(BlockVerifier): + @classmethod + def verify_pow(cls, vertex: BaseTransaction, *, override_weight: Optional[float] = None) -> None: + verify_pow(vertex) + + +class SimulatorMergeMinedBlockVerifier(MergeMinedBlockVerifier): + @classmethod + def verify_pow(cls, vertex: BaseTransaction, *, override_weight: Optional[float] = None) -> None: + verify_pow(vertex) + + +class SimulatorTransactionVerifier(TransactionVerifier): + @classmethod + def verify_pow(cls, vertex: BaseTransaction, *, override_weight: Optional[float] = None) -> None: + verify_pow(vertex) + + +class SimulatorTokenCreationTransactionVerifier(TokenCreationTransactionVerifier): + @classmethod + def verify_pow(cls, vertex: BaseTransaction, *, override_weight: Optional[float] = None) -> None: + verify_pow(vertex) diff --git a/hathor/stratum/stratum.py b/hathor/stratum/stratum.py index 6abc2dfbd..16b278661 100644 --- a/hathor/stratum/stratum.py +++ b/hathor/stratum/stratum.py @@ -41,6 +41,7 @@ from hathor.transaction import BaseTransaction, BitcoinAuxPow, Block, MergeMinedBlock, Transaction, sum_weights from hathor.transaction.exceptions import PowError, ScriptError, TxValidationError from hathor.util import Reactor, json_dumpb, json_loadb, reactor +from hathor.verification.vertex_verifier import VertexVerifier from hathor.wallet.exceptions import InvalidAddress 
if TYPE_CHECKING: @@ -526,7 +527,7 @@ def handle_submit(self, params: dict, msgid: Optional[str]) -> None: self.log.debug('share received', block=tx, block_base=block_base.hex(), block_base_hash=block_base_hash.hex()) try: - tx.verify_pow(job.weight) + VertexVerifier.verify_pow(tx, override_weight=job.weight) except PowError: self.log.error('bad share, discard', job_weight=job.weight, tx=tx) return self.send_error(INVALID_SOLUTION, msgid, { @@ -542,7 +543,7 @@ def handle_submit(self, params: dict, msgid: Optional[str]) -> None: self.manager.reactor.callLater(0, self.job_request) try: - tx.verify_pow() + VertexVerifier.verify_pow(tx) except PowError: # Transaction pow was not enough, but the share was succesfully submited self.log.info('high hash, keep mining', tx=tx) diff --git a/hathor/transaction/base_transaction.py b/hathor/transaction/base_transaction.py index 0e4b2eee5..235e7a16f 100644 --- a/hathor/transaction/base_transaction.py +++ b/hathor/transaction/base_transaction.py @@ -28,19 +28,7 @@ from hathor.checkpoint import Checkpoint from hathor.conf.get_settings import get_settings -from hathor.transaction.exceptions import ( - DuplicatedParents, - IncorrectParents, - InvalidOutputScriptSize, - InvalidOutputValue, - InvalidToken, - ParentDoesNotExist, - PowError, - TimestampError, - TooManyOutputs, - TooManySigOps, - WeightError, -) +from hathor.transaction.exceptions import InvalidOutputValue, WeightError from hathor.transaction.transaction_metadata import TransactionMetadata from hathor.transaction.util import VerboseCallback, int_to_bytes, unpack, unpack_len from hathor.transaction.validation_state import ValidationState @@ -70,14 +58,6 @@ # Weight (d), timestamp (I), and parents len (B) _GRAPH_FORMAT_STRING = '!dIB' -# tx should have 2 parents, both other transactions -_TX_PARENTS_TXS = 2 -_TX_PARENTS_BLOCKS = 0 - -# blocks have 3 parents, 2 txs and 1 block -_BLOCK_PARENTS_TXS = 2 -_BLOCK_PARENTS_BLOCKS = 1 - # The int value of one byte _ONE_BYTE = 0xFF 
@@ -540,137 +520,6 @@ def verify_checkpoint(self, checkpoints: list[Checkpoint]) -> None: To be implemented by tx/block, used by `self.validate_checkpoint`. Should not modify the validation state.""" raise NotImplementedError - def verify_parents(self) -> None: - """All parents must exist and their timestamps must be smaller than ours. - - Also, txs should have 2 other txs as parents, while blocks should have 2 txs + 1 block. - - Parents must be ordered with blocks first, followed by transactions. - - :raises TimestampError: when our timestamp is less or equal than our parent's timestamp - :raises ParentDoesNotExist: when at least one of our parents does not exist - :raises IncorrectParents: when tx does not confirm the correct number/type of parent txs - """ - from hathor.transaction.storage.exceptions import TransactionDoesNotExist - - assert self.storage is not None - - # check if parents are duplicated - parents_set = set(self.parents) - if len(self.parents) > len(parents_set): - raise DuplicatedParents('Tx has duplicated parents: {}', [tx_hash.hex() for tx_hash in self.parents]) - - my_parents_txs = 0 # number of tx parents - my_parents_blocks = 0 # number of block parents - min_timestamp: Optional[int] = None - - for parent_hash in self.parents: - try: - parent = self.storage.get_transaction(parent_hash) - assert parent.hash is not None - if self.timestamp <= parent.timestamp: - raise TimestampError('tx={} timestamp={}, parent={} timestamp={}'.format( - self.hash_hex, - self.timestamp, - parent.hash_hex, - parent.timestamp, - )) - - if parent.is_block: - if self.is_block and not parent.is_genesis: - if self.timestamp - parent.timestamp > self._settings.MAX_DISTANCE_BETWEEN_BLOCKS: - raise TimestampError('Distance between blocks is too big' - ' ({} seconds)'.format(self.timestamp - parent.timestamp)) - if my_parents_txs > 0: - raise IncorrectParents('Parents which are blocks must come before transactions') - for pi_hash in parent.parents: - pi = 
self.storage.get_transaction(parent_hash) - if not pi.is_block: - min_timestamp = ( - min(min_timestamp, pi.timestamp) if min_timestamp is not None - else pi.timestamp - ) - my_parents_blocks += 1 - else: - if min_timestamp and parent.timestamp < min_timestamp: - raise TimestampError('tx={} timestamp={}, parent={} timestamp={}, min_timestamp={}'.format( - self.hash_hex, - self.timestamp, - parent.hash_hex, - parent.timestamp, - min_timestamp - )) - my_parents_txs += 1 - except TransactionDoesNotExist: - raise ParentDoesNotExist('tx={} parent={}'.format(self.hash_hex, parent_hash.hex())) - - # check for correct number of parents - if self.is_block: - parents_txs = _BLOCK_PARENTS_TXS - parents_blocks = _BLOCK_PARENTS_BLOCKS - else: - parents_txs = _TX_PARENTS_TXS - parents_blocks = _TX_PARENTS_BLOCKS - if my_parents_blocks != parents_blocks: - raise IncorrectParents('wrong number of parents (block type): {}, expecting {}'.format( - my_parents_blocks, parents_blocks)) - if my_parents_txs != parents_txs: - raise IncorrectParents('wrong number of parents (tx type): {}, expecting {}'.format( - my_parents_txs, parents_txs)) - - def verify_pow(self, override_weight: Optional[float] = None) -> None: - """Verify proof-of-work - - :raises PowError: when the hash is equal or greater than the target - """ - assert self.hash is not None - numeric_hash = int(self.hash_hex, self.HEX_BASE) - minimum_target = self.get_target(override_weight) - if numeric_hash >= minimum_target: - raise PowError(f'Transaction has invalid data ({numeric_hash} < {minimum_target})') - - def verify_number_of_outputs(self) -> None: - """Verify number of outputs does not exceeds the limit""" - if len(self.outputs) > self._settings.MAX_NUM_OUTPUTS: - raise TooManyOutputs('Maximum number of outputs exceeded') - - def verify_sigops_output(self) -> None: - """ Count sig operations on all outputs and verify that the total sum is below the limit - """ - from hathor.transaction.scripts import get_sigops_count - 
n_txops = 0 - - for tx_output in self.outputs: - n_txops += get_sigops_count(tx_output.script) - - if n_txops > self._settings.MAX_TX_SIGOPS_OUTPUT: - raise TooManySigOps('TX[{}]: Maximum number of sigops for all outputs exceeded ({})'.format( - self.hash_hex, n_txops)) - - def verify_outputs(self) -> None: - """Verify there are no hathor authority UTXOs and outputs are all positive - - :raises InvalidToken: when there's a hathor authority utxo - :raises InvalidOutputValue: output has negative value - :raises TooManyOutputs: when there are too many outputs - """ - self.verify_number_of_outputs() - for index, output in enumerate(self.outputs): - # no hathor authority UTXO - if (output.get_token_index() == 0) and output.is_token_authority(): - raise InvalidToken('Cannot have authority UTXO for hathor tokens: {}'.format( - output.to_human_readable())) - - # output value must be positive - if output.value <= 0: - raise InvalidOutputValue('Output value must be a positive integer. Value: {} and index: {}'.format( - output.value, index)) - - if len(output.script) > self._settings.MAX_OUTPUT_SCRIPT_SIZE: - raise InvalidOutputScriptSize('size: {} and max-size: {}'.format( - len(output.script), self._settings.MAX_OUTPUT_SCRIPT_SIZE - )) - def resolve(self, update_time: bool = False) -> bool: """Run a CPU mining looking for the nonce that solves the proof-of-work diff --git a/hathor/transaction/block.py b/hathor/transaction/block.py index bef6f3368..b416d9fd3 100644 --- a/hathor/transaction/block.py +++ b/hathor/transaction/block.py @@ -18,21 +18,12 @@ from struct import pack from typing import TYPE_CHECKING, Any, Optional -from hathor import daa from hathor.checkpoint import Checkpoint from hathor.feature_activation.feature import Feature from hathor.feature_activation.model.feature_state import FeatureState from hathor.profiler import get_cpu_profiler from hathor.transaction import BaseTransaction, TxOutput, TxVersion -from hathor.transaction.exceptions import ( - 
BlockWithInputs, - BlockWithTokensError, - CheckpointError, - InvalidBlockReward, - RewardLocked, - TransactionDataError, - WeightError, -) +from hathor.transaction.exceptions import CheckpointError from hathor.transaction.util import VerboseCallback, int_to_bytes, unpack, unpack_len from hathor.util import not_none from hathor.utils.int import get_bit_list @@ -337,55 +328,6 @@ def verify_checkpoint(self, checkpoints: list[Checkpoint]) -> None: # TODO: check whether self is a parent of any checkpoint-valid block, this is left for a future PR pass - def verify_weight(self) -> None: - """Validate minimum block difficulty.""" - block_weight = daa.calculate_block_difficulty(self) - if self.weight < block_weight - self._settings.WEIGHT_TOL: - raise WeightError(f'Invalid new block {self.hash_hex}: weight ({self.weight}) is ' - f'smaller than the minimum weight ({block_weight})') - - def verify_height(self) -> None: - """Validate that the block height is enough to confirm all transactions being confirmed.""" - meta = self.get_metadata() - assert meta.height is not None - assert meta.min_height is not None - if meta.height < meta.min_height: - raise RewardLocked(f'Block needs {meta.min_height} height but has {meta.height}') - - def verify_reward(self) -> None: - """Validate reward amount.""" - parent_block = self.get_block_parent() - tokens_issued_per_block = daa.get_tokens_issued_per_block(parent_block.get_height() + 1) - if self.sum_outputs != tokens_issued_per_block: - raise InvalidBlockReward( - f'Invalid number of issued tokens tag=invalid_issued_tokens tx.hash={self.hash_hex} ' - f'issued={self.sum_outputs} allowed={tokens_issued_per_block}' - ) - - def verify_no_inputs(self) -> None: - inputs = getattr(self, 'inputs', None) - if inputs: - raise BlockWithInputs('number of inputs {}'.format(len(inputs))) - - def verify_outputs(self) -> None: - super().verify_outputs() - for output in self.outputs: - if output.get_token_index() > 0: - raise BlockWithTokensError('in 
output: {}'.format(output.to_human_readable())) - - def verify_data(self) -> None: - if len(self.data) > self._settings.BLOCK_DATA_MAX_SIZE: - raise TransactionDataError('block data has {} bytes'.format(len(self.data))) - - def verify_without_storage(self) -> None: - """ Run all verifications that do not need a storage. - """ - self.verify_pow() - self.verify_no_inputs() - self.verify_outputs() - self.verify_data() - self.verify_sigops_output() - def get_base_hash(self) -> bytes: from hathor.merged_mining.bitcoin import sha256d_hash return sha256d_hash(self.get_header_without_nonce()) diff --git a/hathor/transaction/merge_mined_block.py b/hathor/transaction/merge_mined_block.py index 121011a23..a6818ecde 100644 --- a/hathor/transaction/merge_mined_block.py +++ b/hathor/transaction/merge_mined_block.py @@ -74,13 +74,3 @@ def to_json(self, decode_script: bool = False, include_metadata: bool = False) - del json['nonce'] json['aux_pow'] = bytes(self.aux_pow).hex() if self.aux_pow else None return json - - def verify_without_storage(self) -> None: - self.verify_aux_pow() - super().verify_without_storage() - - def verify_aux_pow(self) -> None: - """ Verify auxiliary proof-of-work (for merged mining). 
- """ - assert self.aux_pow is not None - self.aux_pow.verify(self.get_base_hash()) diff --git a/hathor/transaction/resources/create_tx.py b/hathor/transaction/resources/create_tx.py index 438d1f23d..b9641369a 100644 --- a/hathor/transaction/resources/create_tx.py +++ b/hathor/transaction/resources/create_tx.py @@ -17,7 +17,6 @@ from hathor.api_util import Resource, set_cors from hathor.cli.openapi_files.register import register_resource from hathor.crypto.util import decode_address -from hathor.daa import minimum_tx_weight from hathor.exception import InvalidNewTransaction from hathor.transaction import Transaction, TxInput, TxOutput from hathor.transaction.scripts import create_output_script @@ -88,8 +87,8 @@ def render_POST(self, request): for tx_input in fake_signed_tx.inputs: # conservative estimate of the input data size to estimate a valid weight tx_input.data = b'\0' * 107 - tx.weight = minimum_tx_weight(fake_signed_tx) - tx.verify_unsigned_skip_pow() + tx.weight = self.manager.daa.minimum_tx_weight(fake_signed_tx) + self.manager.verification_service.verifiers.tx.verify_unsigned_skip_pow(tx) if tx.is_double_spending(): raise InvalidNewTransaction('At least one of your inputs has already been spent.') diff --git a/hathor/transaction/token_creation_tx.py b/hathor/transaction/token_creation_tx.py index c2e63f9f2..5bcc672a8 100644 --- a/hathor/transaction/token_creation_tx.py +++ b/hathor/transaction/token_creation_tx.py @@ -16,10 +16,9 @@ from typing import Any, Optional from hathor.transaction.base_transaction import TxInput, TxOutput, TxVersion -from hathor.transaction.exceptions import InvalidToken, TransactionDataError from hathor.transaction.storage import TransactionStorage # noqa: F401 -from hathor.transaction.transaction import TokenInfo, Transaction -from hathor.transaction.util import VerboseCallback, clean_token_string, int_to_bytes, unpack, unpack_len +from hathor.transaction.transaction import Transaction +from hathor.transaction.util import 
VerboseCallback, int_to_bytes, unpack, unpack_len # Signal bits (B), version (B), inputs len (B), outputs len (B) _FUNDS_FORMAT_STRING = '!BBBB' @@ -220,45 +219,6 @@ def to_json_extended(self) -> dict[str, Any]: json['tokens'] = [] return json - def verify_sum(self) -> None: - """ Besides all checks made on regular transactions, a few extra ones are made: - - only HTR tokens on the inputs; - - new tokens are actually being minted; - - :raises InvalidToken: when there's an error in token operations - :raises InputOutputMismatch: if sum of inputs is not equal to outputs and there's no mint/melt - """ - token_dict = self.get_token_info_from_inputs() - - # we add the created token's info to token_dict, as the creation tx allows for mint/melt - assert self.hash is not None - token_dict[self.hash] = TokenInfo(0, True, True) - - self.update_token_info_from_outputs(token_dict) - - # make sure tokens are being minted - token_info = token_dict[self.hash] - if token_info.amount <= 0: - raise InvalidToken('Token creation transaction must mint new tokens') - - self.check_authorities_and_deposit(token_dict) - - def verify_token_info(self) -> None: - """ Validates token info - """ - name_len = len(self.token_name) - symbol_len = len(self.token_symbol) - if name_len == 0 or name_len > self._settings.MAX_LENGTH_TOKEN_NAME: - raise TransactionDataError('Invalid token name length ({})'.format(name_len)) - if symbol_len == 0 or symbol_len > self._settings.MAX_LENGTH_TOKEN_SYMBOL: - raise TransactionDataError('Invalid token symbol length ({})'.format(symbol_len)) - - # Can't create token with hathor name or symbol - if clean_token_string(self.token_name) == clean_token_string(self._settings.HATHOR_TOKEN_NAME): - raise TransactionDataError('Invalid token name ({})'.format(self.token_name)) - if clean_token_string(self.token_symbol) == clean_token_string(self._settings.HATHOR_TOKEN_SYMBOL): - raise TransactionDataError('Invalid token symbol ({})'.format(self.token_symbol)) - def 
decode_string_utf8(encoded: bytes, key: str) -> str: """ Raises StructError in case it's not a valid utf-8 string diff --git a/hathor/transaction/transaction.py b/hathor/transaction/transaction.py index 626010da2..9ca2c20d6 100644 --- a/hathor/transaction/transaction.py +++ b/hathor/transaction/transaction.py @@ -17,30 +17,12 @@ from struct import pack from typing import TYPE_CHECKING, Any, Iterator, NamedTuple, Optional -from hathor import daa from hathor.checkpoint import Checkpoint from hathor.exception import InvalidNewTransaction from hathor.profiler import get_cpu_profiler from hathor.transaction import BaseTransaction, Block, TxInput, TxOutput, TxVersion from hathor.transaction.base_transaction import TX_HASH_SIZE -from hathor.transaction.exceptions import ( - ConflictingInputs, - DuplicatedParents, - IncorrectParents, - InexistentInput, - InputOutputMismatch, - InvalidInputData, - InvalidInputDataSize, - InvalidToken, - NoInputError, - RewardLocked, - ScriptError, - TimestampError, - TooManyInputs, - TooManySigOps, - WeightError, -) -from hathor.transaction.util import VerboseCallback, get_deposit_amount, get_withdraw_amount, unpack, unpack_len +from hathor.transaction.util import VerboseCallback, unpack, unpack_len from hathor.types import TokenUid, VertexId from hathor.util import not_none @@ -296,89 +278,6 @@ def verify_checkpoint(self, checkpoints: list[Checkpoint]) -> None: raise InvalidNewTransaction(f'Invalid new transaction {self.hash_hex}: expected to reach a checkpoint but ' 'none of its children is checkpoint-valid') - def verify_parents_basic(self) -> None: - """Verify number and non-duplicity of parents.""" - assert self.storage is not None - - # check if parents are duplicated - parents_set = set(self.parents) - if len(self.parents) > len(parents_set): - raise DuplicatedParents('Tx has duplicated parents: {}', [tx_hash.hex() for tx_hash in self.parents]) - - if len(self.parents) != 2: - raise IncorrectParents(f'wrong number of parents (tx 
type): {len(self.parents)}, expecting 2') - - def verify_weight(self) -> None: - """Validate minimum tx difficulty.""" - min_tx_weight = daa.minimum_tx_weight(self) - max_tx_weight = min_tx_weight + self._settings.MAX_TX_WEIGHT_DIFF - if self.weight < min_tx_weight - self._settings.WEIGHT_TOL: - raise WeightError(f'Invalid new tx {self.hash_hex}: weight ({self.weight}) is ' - f'smaller than the minimum weight ({min_tx_weight})') - elif min_tx_weight > self._settings.MAX_TX_WEIGHT_DIFF_ACTIVATION and self.weight > max_tx_weight: - raise WeightError(f'Invalid new tx {self.hash_hex}: weight ({self.weight}) is ' - f'greater than the maximum allowed ({max_tx_weight})') - - def verify_unsigned_skip_pow(self) -> None: - """ Same as .verify but skipping pow and signature verification.""" - self.verify_number_of_inputs() - self.verify_number_of_outputs() - self.verify_outputs() - self.verify_sigops_output() - self.verify_sigops_input() - self.verify_inputs(skip_script=True) # need to run verify_inputs first to check if all inputs exist - self.verify_parents() - self.verify_sum() - - def verify_without_storage(self) -> None: - """ Run all verifications that do not need a storage. 
- """ - self.verify_pow() - self.verify_number_of_inputs() - self.verify_outputs() - self.verify_sigops_output() - - def verify_number_of_inputs(self) -> None: - """Verify number of inputs is in a valid range""" - if len(self.inputs) > self._settings.MAX_NUM_INPUTS: - raise TooManyInputs('Maximum number of inputs exceeded') - - if len(self.inputs) == 0: - if not self.is_genesis: - raise NoInputError('Transaction must have at least one input') - - def verify_sigops_input(self) -> None: - """ Count sig operations on all inputs and verify that the total sum is below the limit - """ - from hathor.transaction.scripts import get_sigops_count - from hathor.transaction.storage.exceptions import TransactionDoesNotExist - n_txops = 0 - for tx_input in self.inputs: - try: - spent_tx = self.get_spent_tx(tx_input) - except TransactionDoesNotExist: - raise InexistentInput('Input tx does not exist: {}'.format(tx_input.tx_id.hex())) - assert spent_tx.hash is not None - if tx_input.index >= len(spent_tx.outputs): - raise InexistentInput('Output spent by this input does not exist: {} index {}'.format( - tx_input.tx_id.hex(), tx_input.index)) - n_txops += get_sigops_count(tx_input.data, spent_tx.outputs[tx_input.index].script) - - if n_txops > self._settings.MAX_TX_SIGOPS_INPUT: - raise TooManySigOps( - 'TX[{}]: Max number of sigops for inputs exceeded ({})'.format(self.hash_hex, n_txops)) - - def verify_outputs(self) -> None: - """Verify outputs reference an existing token uid in the tokens list - - :raises InvalidToken: output references non existent token uid - """ - super().verify_outputs() - for output in self.outputs: - # check index is valid - if output.get_token_index() > len(self.tokens): - raise InvalidToken('token uid index not available: index {}'.format(output.get_token_index())) - def get_token_info_from_inputs(self) -> dict[TokenUid, TokenInfo]: """Sum up all tokens present in the inputs and their properties (amount, can_mint, can_melt) """ @@ -406,92 +305,6 @@ def 
get_token_info_from_inputs(self) -> dict[TokenUid, TokenInfo]: return token_dict - def update_token_info_from_outputs(self, token_dict: dict[TokenUid, TokenInfo]) -> None: - """Iterate over the outputs and add values to token info dict. Updates the dict in-place. - - Also, checks if no token has authorities on the outputs not present on the inputs - - :raises InvalidToken: when there's an error in token operations - """ - # iterate over outputs and add values to token_dict - for index, tx_output in enumerate(self.outputs): - token_uid = self.get_token_uid(tx_output.get_token_index()) - token_info = token_dict.get(token_uid) - if token_info is None: - raise InvalidToken('no inputs for token {}'.format(token_uid.hex())) - else: - # for authority outputs, make sure the same capability (mint/melt) was present in the inputs - if tx_output.can_mint_token() and not token_info.can_mint: - raise InvalidToken('output has mint authority, but no input has it: {}'.format( - tx_output.to_human_readable())) - if tx_output.can_melt_token() and not token_info.can_melt: - raise InvalidToken('output has melt authority, but no input has it: {}'.format( - tx_output.to_human_readable())) - - if tx_output.is_token_authority(): - # make sure we only have authorities that we know of - if tx_output.value > TxOutput.ALL_AUTHORITIES: - raise InvalidToken('Invalid authorities in output (0b{0:b})'.format(tx_output.value)) - else: - # for regular outputs, just subtract from the total amount - sum_tokens = token_info.amount + tx_output.value - token_dict[token_uid] = TokenInfo(sum_tokens, token_info.can_mint, token_info.can_melt) - - def check_authorities_and_deposit(self, token_dict: dict[TokenUid, TokenInfo]) -> None: - """Verify that the sum of outputs is equal of the sum of inputs, for each token. If sum of inputs - and outputs is not 0, make sure inputs have mint/melt authority. 
- - token_dict sums up all tokens present in the tx and their properties (amount, can_mint, can_melt) - amount = outputs - inputs, thus: - - amount < 0 when melting - - amount > 0 when minting - - :raises InputOutputMismatch: if sum of inputs is not equal to outputs and there's no mint/melt - """ - withdraw = 0 - deposit = 0 - for token_uid, token_info in token_dict.items(): - if token_uid == self._settings.HATHOR_TOKEN_UID: - continue - - if token_info.amount == 0: - # that's the usual behavior, nothing to do - pass - elif token_info.amount < 0: - # tokens have been melted - if not token_info.can_melt: - raise InputOutputMismatch('{} {} tokens melted, but there is no melt authority input'.format( - token_info.amount, token_uid.hex())) - withdraw += get_withdraw_amount(token_info.amount) - else: - # tokens have been minted - if not token_info.can_mint: - raise InputOutputMismatch('{} {} tokens minted, but there is no mint authority input'.format( - (-1) * token_info.amount, token_uid.hex())) - deposit += get_deposit_amount(token_info.amount) - - # check whether the deposit/withdraw amount is correct - htr_expected_amount = withdraw - deposit - htr_info = token_dict[self._settings.HATHOR_TOKEN_UID] - if htr_info.amount != htr_expected_amount: - raise InputOutputMismatch('HTR balance is different than expected. (amount={}, expected={})'.format( - htr_info.amount, - htr_expected_amount, - )) - - def verify_sum(self) -> None: - """Verify that the sum of outputs is equal of the sum of inputs, for each token. - - If there are authority UTXOs involved, tokens can be minted or melted, so the above rule may - not be respected. 
- - :raises InvalidToken: when there's an error in token operations - :raises InputOutputMismatch: if sum of inputs is not equal to outputs and there's no mint/melt - """ - token_dict = self.get_token_info_from_inputs() - self.update_token_info_from_outputs(token_dict) - self.check_authorities_and_deposit(token_dict) - def iter_spent_rewards(self) -> Iterator[Block]: """Iterate over all the rewards being spent, assumes tx has been verified.""" for input_tx in self.inputs: @@ -500,51 +313,6 @@ def iter_spent_rewards(self) -> Iterator[Block]: assert isinstance(spent_tx, Block) yield spent_tx - def verify_inputs(self, *, skip_script: bool = False) -> None: - """Verify inputs signatures and ownership and all inputs actually exist""" - from hathor.transaction.storage.exceptions import TransactionDoesNotExist - - spent_outputs: set[tuple[VertexId, int]] = set() - for input_tx in self.inputs: - if len(input_tx.data) > self._settings.MAX_INPUT_DATA_SIZE: - raise InvalidInputDataSize('size: {} and max-size: {}'.format( - len(input_tx.data), self._settings.MAX_INPUT_DATA_SIZE - )) - - try: - spent_tx = self.get_spent_tx(input_tx) - assert spent_tx.hash is not None - if input_tx.index >= len(spent_tx.outputs): - raise InexistentInput('Output spent by this input does not exist: {} index {}'.format( - input_tx.tx_id.hex(), input_tx.index)) - except TransactionDoesNotExist: - raise InexistentInput('Input tx does not exist: {}'.format(input_tx.tx_id.hex())) - - if self.timestamp <= spent_tx.timestamp: - raise TimestampError('tx={} timestamp={}, spent_tx={} timestamp={}'.format( - self.hash.hex() if self.hash else None, - self.timestamp, - spent_tx.hash.hex(), - spent_tx.timestamp, - )) - - if not skip_script: - self.verify_script(input_tx, spent_tx) - - # check if any other input in this tx is spending the same output - key = (input_tx.tx_id, input_tx.index) - if key in spent_outputs: - raise ConflictingInputs('tx {} inputs spend the same output: {} index {}'.format( - 
self.hash_hex, input_tx.tx_id.hex(), input_tx.index)) - spent_outputs.add(key) - - def verify_reward_locked(self) -> None: - """Will raise `RewardLocked` if any reward is spent before the best block height is enough, considering only - the block rewards spent by this tx itself, and not the inherited `min_height`.""" - info = self.get_spent_reward_locked_info() - if info is not None: - raise RewardLocked(f'Reward {info.block_hash.hex()} still needs {info.blocks_needed} to be unlocked.') - def is_spent_reward_locked(self) -> bool: """ Check whether any spent reward is currently locked, considering only the block rewards spent by this tx itself, and not the inherited `min_height`""" @@ -578,17 +346,6 @@ def _spent_reward_needed_height(self, block: Block) -> int: needed_height = self._settings.REWARD_SPEND_MIN_BLOCKS - spend_blocks return max(needed_height, 0) - def verify_script(self, input_tx: TxInput, spent_tx: BaseTransaction) -> None: - """ - :type input_tx: TxInput - :type spent_tx: Transaction - """ - from hathor.transaction.scripts import script_eval - try: - script_eval(self, input_tx, spent_tx) - except ScriptError as e: - raise InvalidInputData(e) from e - def is_double_spending(self) -> bool: """ Iterate through inputs to check if they were already spent Used to prevent users from sending double spending transactions to the network diff --git a/hathor/verification/block_verification.py b/hathor/verification/block_verification.py deleted file mode 100644 index 3e47aa254..000000000 --- a/hathor/verification/block_verification.py +++ /dev/null @@ -1,47 +0,0 @@ -# Copyright 2023 Hathor Labs -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from hathor.profiler import get_cpu_profiler -from hathor.transaction import Block - -cpu = get_cpu_profiler() - - -def verify_basic(block: Block, *, skip_block_weight_verification: bool = False) -> None: - """Partially run validations, the ones that need parents/inputs are skipped.""" - if not skip_block_weight_verification: - block.verify_weight() - block.verify_reward() - - -@cpu.profiler(key=lambda block: 'block-verify!{}'.format(block.hash.hex())) -def verify(block: Block) -> None: - """ - (1) confirms at least two pending transactions and references last block - (2) solves the pow with the correct weight (done in HathorManager) - (3) creates the correct amount of tokens in the output (done in HathorManager) - (4) all parents must exist and have timestamp smaller than ours - (5) data field must contain at most BLOCK_DATA_MAX_SIZE bytes - """ - # TODO Should we validate a limit of outputs? - if block.is_genesis: - # TODO do genesis validation - return - - block.verify_without_storage() - - # (1) and (4) - block.verify_parents() - - block.verify_height() diff --git a/hathor/verification/block_verifier.py b/hathor/verification/block_verifier.py new file mode 100644 index 000000000..6d531c2af --- /dev/null +++ b/hathor/verification/block_verifier.py @@ -0,0 +1,110 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from hathor.profiler import get_cpu_profiler +from hathor.transaction import BaseTransaction, Block +from hathor.transaction.exceptions import ( + BlockWithInputs, + BlockWithTokensError, + InvalidBlockReward, + RewardLocked, + TransactionDataError, + WeightError, +) +from hathor.verification.vertex_verifier import VertexVerifier + +cpu = get_cpu_profiler() + + +class BlockVerifier(VertexVerifier): + __slots__ = () + + def verify_basic(self, block: Block, *, skip_block_weight_verification: bool = False) -> None: + """Partially run validations, the ones that need parents/inputs are skipped.""" + if not skip_block_weight_verification: + self.verify_weight(block) + self.verify_reward(block) + + @cpu.profiler(key=lambda _, block: 'block-verify!{}'.format(block.hash.hex())) + def verify(self, block: Block) -> None: + """ + (1) confirms at least two pending transactions and references last block + (2) solves the pow with the correct weight (done in HathorManager) + (3) creates the correct amount of tokens in the output (done in HathorManager) + (4) all parents must exist and have timestamp smaller than ours + (5) data field must contain at most BLOCK_DATA_MAX_SIZE bytes + """ + # TODO Should we validate a limit of outputs? + if block.is_genesis: + # TODO do genesis validation + return + + self.verify_without_storage(block) + + # (1) and (4) + self.verify_parents(block) + + self.verify_height(block) + + def verify_without_storage(self, block: Block) -> None: + """ Run all verifications that do not need a storage. 
+ """ + self.verify_pow(block) + self.verify_no_inputs(block) + self.verify_outputs(block) + self.verify_data(block) + self.verify_sigops_output(block) + + @staticmethod + def verify_height(block: Block) -> None: + """Validate that the block height is enough to confirm all transactions being confirmed.""" + meta = block.get_metadata() + assert meta.height is not None + assert meta.min_height is not None + if meta.height < meta.min_height: + raise RewardLocked(f'Block needs {meta.min_height} height but has {meta.height}') + + def verify_weight(self, block: Block) -> None: + """Validate minimum block difficulty.""" + block_weight = self._daa.calculate_block_difficulty(block) + if block.weight < block_weight - self._settings.WEIGHT_TOL: + raise WeightError(f'Invalid new block {block.hash_hex}: weight ({block.weight}) is ' + f'smaller than the minimum weight ({block_weight})') + + def verify_reward(self, block: Block) -> None: + """Validate reward amount.""" + parent_block = block.get_block_parent() + tokens_issued_per_block = self._daa.get_tokens_issued_per_block(parent_block.get_height() + 1) + if block.sum_outputs != tokens_issued_per_block: + raise InvalidBlockReward( + f'Invalid number of issued tokens tag=invalid_issued_tokens tx.hash={block.hash_hex} ' + f'issued={block.sum_outputs} allowed={tokens_issued_per_block}' + ) + + @staticmethod + def verify_no_inputs(block: Block) -> None: + inputs = getattr(block, 'inputs', None) + if inputs: + raise BlockWithInputs('number of inputs {}'.format(len(inputs))) + + def verify_outputs(self, block: BaseTransaction) -> None: + assert isinstance(block, Block) + super().verify_outputs(block) + for output in block.outputs: + if output.get_token_index() > 0: + raise BlockWithTokensError('in output: {}'.format(output.to_human_readable())) + + def verify_data(self, block: Block) -> None: + if len(block.data) > self._settings.BLOCK_DATA_MAX_SIZE: + raise TransactionDataError('block data has {} bytes'.format(len(block.data))) diff 
--git a/hathor/verification/merge_mined_block_verifier.py b/hathor/verification/merge_mined_block_verifier.py new file mode 100644 index 000000000..41d34bd4a --- /dev/null +++ b/hathor/verification/merge_mined_block_verifier.py @@ -0,0 +1,32 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from hathor.transaction import Block, MergeMinedBlock +from hathor.verification.block_verifier import BlockVerifier + + +class MergeMinedBlockVerifier(BlockVerifier): + __slots__ = () + + def verify_without_storage(self, block: Block) -> None: + assert isinstance(block, MergeMinedBlock) + self.verify_aux_pow(block) + super().verify_without_storage(block) + + @staticmethod + def verify_aux_pow(block: MergeMinedBlock) -> None: + """ Verify auxiliary proof-of-work (for merged mining). + """ + assert block.aux_pow is not None + block.aux_pow.verify(block.get_base_hash()) diff --git a/hathor/verification/token_creation_transaction_verification.py b/hathor/verification/token_creation_transaction_verification.py deleted file mode 100644 index b1d9622b2..000000000 --- a/hathor/verification/token_creation_transaction_verification.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright 2023 Hathor Labs -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from hathor.transaction.token_creation_tx import TokenCreationTransaction -from hathor.verification import transaction_verification - - -def verify(tx: TokenCreationTransaction, *, reject_locked_reward: bool = True) -> None: - """ Run all validations as regular transactions plus validation on token info. - - We also overload verify_sum to make some different checks - """ - transaction_verification.verify(tx, reject_locked_reward=reject_locked_reward) - tx.verify_token_info() diff --git a/hathor/verification/token_creation_transaction_verifier.py b/hathor/verification/token_creation_transaction_verifier.py new file mode 100644 index 000000000..cdb41ace7 --- /dev/null +++ b/hathor/verification/token_creation_transaction_verifier.py @@ -0,0 +1,71 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from hathor.transaction.exceptions import InvalidToken, TransactionDataError +from hathor.transaction.token_creation_tx import TokenCreationTransaction +from hathor.transaction.transaction import TokenInfo, Transaction +from hathor.transaction.util import clean_token_string +from hathor.verification.transaction_verifier import TransactionVerifier + + +class TokenCreationTransactionVerifier(TransactionVerifier): + __slots__ = () + + def verify(self, tx: TokenCreationTransaction, *, reject_locked_reward: bool = True) -> None: + """ Run all validations as regular transactions plus validation on token info. + + We also overload verify_sum to make some different checks + """ + super().verify(tx, reject_locked_reward=reject_locked_reward) + self.verify_token_info(tx) + + def verify_sum(self, tx: Transaction) -> None: + """ Besides all checks made on regular transactions, a few extra ones are made: + - only HTR tokens on the inputs; + - new tokens are actually being minted; + + :raises InvalidToken: when there's an error in token operations + :raises InputOutputMismatch: if sum of inputs is not equal to outputs and there's no mint/melt + """ + assert isinstance(tx, TokenCreationTransaction) + token_dict = tx.get_token_info_from_inputs() + + # we add the created token's info to token_dict, as the creation tx allows for mint/melt + assert tx.hash is not None + token_dict[tx.hash] = TokenInfo(0, True, True) + + self.update_token_info_from_outputs(tx, token_dict=token_dict) + + # make sure tokens are being minted + token_info = token_dict[tx.hash] + if token_info.amount <= 0: + raise InvalidToken('Token creation transaction must mint new tokens') + + self.verify_authorities_and_deposit(token_dict) + + def verify_token_info(self, tx: TokenCreationTransaction) -> None: + """ Validates token info + """ + name_len = len(tx.token_name) + symbol_len = len(tx.token_symbol) + if name_len == 0 or name_len > self._settings.MAX_LENGTH_TOKEN_NAME: + raise TransactionDataError('Invalid 
token name length ({})'.format(name_len)) + if symbol_len == 0 or symbol_len > self._settings.MAX_LENGTH_TOKEN_SYMBOL: + raise TransactionDataError('Invalid token symbol length ({})'.format(symbol_len)) + + # Can't create token with hathor name or symbol + if clean_token_string(tx.token_name) == clean_token_string(self._settings.HATHOR_TOKEN_NAME): + raise TransactionDataError('Invalid token name ({})'.format(tx.token_name)) + if clean_token_string(tx.token_symbol) == clean_token_string(self._settings.HATHOR_TOKEN_SYMBOL): + raise TransactionDataError('Invalid token symbol ({})'.format(tx.token_symbol)) diff --git a/hathor/verification/transaction_verification.py b/hathor/verification/transaction_verification.py deleted file mode 100644 index 02d887a10..000000000 --- a/hathor/verification/transaction_verification.py +++ /dev/null @@ -1,53 +0,0 @@ -# Copyright 2023 Hathor Labs -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from hathor.profiler import get_cpu_profiler -from hathor.transaction import Transaction - -cpu = get_cpu_profiler() - - -def verify_basic(transaction: Transaction) -> None: - """Partially run validations, the ones that need parents/inputs are skipped.""" - if transaction.is_genesis: - # TODO do genesis validation? 
- return - transaction.verify_parents_basic() - transaction.verify_weight() - transaction.verify_without_storage() - - -@cpu.profiler(key=lambda tx: 'tx-verify!{}'.format(tx.hash.hex())) -def verify(tx: Transaction, *, reject_locked_reward: bool = True) -> None: - """ Common verification for all transactions: - (i) number of inputs is at most 256 - (ii) number of outputs is at most 256 - (iii) confirms at least two pending transactions - (iv) solves the pow (we verify weight is correct in HathorManager) - (v) validates signature of inputs - (vi) validates public key and output (of the inputs) addresses - (vii) validate that both parents are valid - (viii) validate input's timestamps - (ix) validate inputs and outputs sum - """ - if tx.is_genesis: - # TODO do genesis validation - return - tx.verify_without_storage() - tx.verify_sigops_input() - tx.verify_inputs() # need to run verify_inputs first to check if all inputs exist - tx.verify_parents() - tx.verify_sum() - if reject_locked_reward: - tx.verify_reward_locked() diff --git a/hathor/verification/transaction_verifier.py b/hathor/verification/transaction_verifier.py new file mode 100644 index 000000000..5714ed46b --- /dev/null +++ b/hathor/verification/transaction_verifier.py @@ -0,0 +1,307 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from hathor.profiler import get_cpu_profiler +from hathor.transaction import BaseTransaction, Transaction, TxInput, TxOutput +from hathor.transaction.exceptions import ( + ConflictingInputs, + DuplicatedParents, + IncorrectParents, + InexistentInput, + InputOutputMismatch, + InvalidInputData, + InvalidInputDataSize, + InvalidToken, + NoInputError, + RewardLocked, + ScriptError, + TimestampError, + TooManyInputs, + TooManySigOps, + WeightError, +) +from hathor.transaction.transaction import TokenInfo +from hathor.transaction.util import get_deposit_amount, get_withdraw_amount +from hathor.types import TokenUid, VertexId +from hathor.verification.vertex_verifier import VertexVerifier + +cpu = get_cpu_profiler() + + +class TransactionVerifier(VertexVerifier): + __slots__ = () + + def verify_basic(self, tx: Transaction) -> None: + """Partially run validations, the ones that need parents/inputs are skipped.""" + if tx.is_genesis: + # TODO do genesis validation? + return + self.verify_parents_basic(tx) + self.verify_weight(tx) + self.verify_without_storage(tx) + + @cpu.profiler(key=lambda _, tx: 'tx-verify!{}'.format(tx.hash.hex())) + def verify(self, tx: Transaction, *, reject_locked_reward: bool = True) -> None: + """ Common verification for all transactions: + (i) number of inputs is at most 256 + (ii) number of outputs is at most 256 + (iii) confirms at least two pending transactions + (iv) solves the pow (we verify weight is correct in HathorManager) + (v) validates signature of inputs + (vi) validates public key and output (of the inputs) addresses + (vii) validate that both parents are valid + (viii) validate input's timestamps + (ix) validate inputs and outputs sum + """ + if tx.is_genesis: + # TODO do genesis validation + return + self.verify_without_storage(tx) + self.verify_sigops_input(tx) + self.verify_inputs(tx) # need to run verify_inputs first to check if all inputs exist + self.verify_parents(tx) + self.verify_sum(tx) + if reject_locked_reward: + 
self.verify_reward_locked(tx) + + def verify_unsigned_skip_pow(self, tx: Transaction) -> None: + """ Same as .verify but skipping pow and signature verification.""" + self.verify_number_of_inputs(tx) + self.verify_number_of_outputs(tx) + self.verify_outputs(tx) + self.verify_sigops_output(tx) + self.verify_sigops_input(tx) + self.verify_inputs(tx, skip_script=True) # need to run verify_inputs first to check if all inputs exist + self.verify_parents(tx) + self.verify_sum(tx) + + @staticmethod + def verify_parents_basic(tx: Transaction) -> None: + """Verify number and non-duplicity of parents.""" + assert tx.storage is not None + + # check if parents are duplicated + parents_set = set(tx.parents) + if len(tx.parents) > len(parents_set): + raise DuplicatedParents('Tx has duplicated parents: {}', [tx_hash.hex() for tx_hash in tx.parents]) + + if len(tx.parents) != 2: + raise IncorrectParents(f'wrong number of parents (tx type): {len(tx.parents)}, expecting 2') + + def verify_weight(self, tx: Transaction) -> None: + """Validate minimum tx difficulty.""" + min_tx_weight = self._daa.minimum_tx_weight(tx) + max_tx_weight = min_tx_weight + self._settings.MAX_TX_WEIGHT_DIFF + if tx.weight < min_tx_weight - self._settings.WEIGHT_TOL: + raise WeightError(f'Invalid new tx {tx.hash_hex}: weight ({tx.weight}) is ' + f'smaller than the minimum weight ({min_tx_weight})') + elif min_tx_weight > self._settings.MAX_TX_WEIGHT_DIFF_ACTIVATION and tx.weight > max_tx_weight: + raise WeightError(f'Invalid new tx {tx.hash_hex}: weight ({tx.weight}) is ' + f'greater than the maximum allowed ({max_tx_weight})') + + def verify_without_storage(self, tx: Transaction) -> None: + """ Run all verifications that do not need a storage. 
+ """ + self.verify_pow(tx) + self.verify_number_of_inputs(tx) + self.verify_outputs(tx) + self.verify_sigops_output(tx) + + def verify_sigops_input(self, tx: Transaction) -> None: + """ Count sig operations on all inputs and verify that the total sum is below the limit + """ + from hathor.transaction.scripts import get_sigops_count + from hathor.transaction.storage.exceptions import TransactionDoesNotExist + n_txops = 0 + for tx_input in tx.inputs: + try: + spent_tx = tx.get_spent_tx(tx_input) + except TransactionDoesNotExist: + raise InexistentInput('Input tx does not exist: {}'.format(tx_input.tx_id.hex())) + assert spent_tx.hash is not None + if tx_input.index >= len(spent_tx.outputs): + raise InexistentInput('Output spent by this input does not exist: {} index {}'.format( + tx_input.tx_id.hex(), tx_input.index)) + n_txops += get_sigops_count(tx_input.data, spent_tx.outputs[tx_input.index].script) + + if n_txops > self._settings.MAX_TX_SIGOPS_INPUT: + raise TooManySigOps( + 'TX[{}]: Max number of sigops for inputs exceeded ({})'.format(tx.hash_hex, n_txops)) + + def verify_inputs(self, tx: Transaction, *, skip_script: bool = False) -> None: + """Verify inputs signatures and ownership and all inputs actually exist""" + from hathor.transaction.storage.exceptions import TransactionDoesNotExist + + spent_outputs: set[tuple[VertexId, int]] = set() + for input_tx in tx.inputs: + if len(input_tx.data) > self._settings.MAX_INPUT_DATA_SIZE: + raise InvalidInputDataSize('size: {} and max-size: {}'.format( + len(input_tx.data), self._settings.MAX_INPUT_DATA_SIZE + )) + + try: + spent_tx = tx.get_spent_tx(input_tx) + assert spent_tx.hash is not None + if input_tx.index >= len(spent_tx.outputs): + raise InexistentInput('Output spent by this input does not exist: {} index {}'.format( + input_tx.tx_id.hex(), input_tx.index)) + except TransactionDoesNotExist: + raise InexistentInput('Input tx does not exist: {}'.format(input_tx.tx_id.hex())) + + if tx.timestamp <= 
spent_tx.timestamp: + raise TimestampError('tx={} timestamp={}, spent_tx={} timestamp={}'.format( + tx.hash.hex() if tx.hash else None, + tx.timestamp, + spent_tx.hash.hex(), + spent_tx.timestamp, + )) + + if not skip_script: + self.verify_script(tx=tx, input_tx=input_tx, spent_tx=spent_tx) + + # check if any other input in this tx is spending the same output + key = (input_tx.tx_id, input_tx.index) + if key in spent_outputs: + raise ConflictingInputs('tx {} inputs spend the same output: {} index {}'.format( + tx.hash_hex, input_tx.tx_id.hex(), input_tx.index)) + spent_outputs.add(key) + + @staticmethod + def verify_script(*, tx: Transaction, input_tx: TxInput, spent_tx: BaseTransaction) -> None: + """ + :type tx: Transaction + :type input_tx: TxInput + :type spent_tx: Transaction + """ + from hathor.transaction.scripts import script_eval + try: + script_eval(tx, input_tx, spent_tx) + except ScriptError as e: + raise InvalidInputData(e) from e + + def verify_sum(self, tx: Transaction) -> None: + """Verify that the sum of outputs is equal of the sum of inputs, for each token. + + If there are authority UTXOs involved, tokens can be minted or melted, so the above rule may + not be respected. 
+ + :raises InvalidToken: when there's an error in token operations + :raises InputOutputMismatch: if sum of inputs is not equal to outputs and there's no mint/melt + """ + token_dict = tx.get_token_info_from_inputs() + self.update_token_info_from_outputs(tx, token_dict=token_dict) + self.verify_authorities_and_deposit(token_dict) + + @staticmethod + def verify_reward_locked(tx: Transaction) -> None: + """Will raise `RewardLocked` if any reward is spent before the best block height is enough, considering only + the block rewards spent by this tx itself, and not the inherited `min_height`.""" + info = tx.get_spent_reward_locked_info() + if info is not None: + raise RewardLocked(f'Reward {info.block_hash.hex()} still needs {info.blocks_needed} to be unlocked.') + + def verify_number_of_inputs(self, tx: Transaction) -> None: + """Verify number of inputs is in a valid range""" + if len(tx.inputs) > self._settings.MAX_NUM_INPUTS: + raise TooManyInputs('Maximum number of inputs exceeded') + + if len(tx.inputs) == 0: + if not tx.is_genesis: + raise NoInputError('Transaction must have at least one input') + + def verify_outputs(self, tx: BaseTransaction) -> None: + """Verify outputs reference an existing token uid in the tokens list + + :raises InvalidToken: output references non existent token uid + """ + assert isinstance(tx, Transaction) + super().verify_outputs(tx) + for output in tx.outputs: + # check index is valid + if output.get_token_index() > len(tx.tokens): + raise InvalidToken('token uid index not available: index {}'.format(output.get_token_index())) + + def verify_authorities_and_deposit(self, token_dict: dict[TokenUid, TokenInfo]) -> None: + """Verify that the sum of outputs is equal of the sum of inputs, for each token. If sum of inputs + and outputs is not 0, make sure inputs have mint/melt authority. 
+ + token_dict sums up all tokens present in the tx and their properties (amount, can_mint, can_melt) + amount = outputs - inputs, thus: + - amount < 0 when melting + - amount > 0 when minting + + :raises InputOutputMismatch: if sum of inputs is not equal to outputs and there's no mint/melt + """ + withdraw = 0 + deposit = 0 + for token_uid, token_info in token_dict.items(): + if token_uid == self._settings.HATHOR_TOKEN_UID: + continue + + if token_info.amount == 0: + # that's the usual behavior, nothing to do + pass + elif token_info.amount < 0: + # tokens have been melted + if not token_info.can_melt: + raise InputOutputMismatch('{} {} tokens melted, but there is no melt authority input'.format( + token_info.amount, token_uid.hex())) + withdraw += get_withdraw_amount(token_info.amount) + else: + # tokens have been minted + if not token_info.can_mint: + raise InputOutputMismatch('{} {} tokens minted, but there is no mint authority input'.format( + (-1) * token_info.amount, token_uid.hex())) + deposit += get_deposit_amount(token_info.amount) + + # check whether the deposit/withdraw amount is correct + htr_expected_amount = withdraw - deposit + htr_info = token_dict[self._settings.HATHOR_TOKEN_UID] + if htr_info.amount != htr_expected_amount: + raise InputOutputMismatch('HTR balance is different than expected. (amount={}, expected={})'.format( + htr_info.amount, + htr_expected_amount, + )) + + @staticmethod + def update_token_info_from_outputs(tx: Transaction, *, token_dict: dict[TokenUid, TokenInfo]) -> None: + """Iterate over the outputs and add values to token info dict. Updates the dict in-place. 
+ + Also, checks if no token has authorities on the outputs not present on the inputs + + :raises InvalidToken: when there's an error in token operations + """ + # iterate over outputs and add values to token_dict + for index, tx_output in enumerate(tx.outputs): + token_uid = tx.get_token_uid(tx_output.get_token_index()) + token_info = token_dict.get(token_uid) + if token_info is None: + raise InvalidToken('no inputs for token {}'.format(token_uid.hex())) + else: + # for authority outputs, make sure the same capability (mint/melt) was present in the inputs + if tx_output.can_mint_token() and not token_info.can_mint: + raise InvalidToken('output has mint authority, but no input has it: {}'.format( + tx_output.to_human_readable())) + if tx_output.can_melt_token() and not token_info.can_melt: + raise InvalidToken('output has melt authority, but no input has it: {}'.format( + tx_output.to_human_readable())) + + if tx_output.is_token_authority(): + # make sure we only have authorities that we know of + if tx_output.value > TxOutput.ALL_AUTHORITIES: + raise InvalidToken('Invalid authorities in output (0b{0:b})'.format(tx_output.value)) + else: + # for regular outputs, just subtract from the total amount + sum_tokens = token_info.amount + tx_output.value + token_dict[token_uid] = TokenInfo(sum_tokens, token_info.can_mint, token_info.can_melt) diff --git a/hathor/verification/verification_service.py b/hathor/verification/verification_service.py index 2a98cb662..3f87369ff 100644 --- a/hathor/verification/verification_service.py +++ b/hathor/verification/verification_service.py @@ -12,15 +12,41 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from hathor.transaction import BaseTransaction, Block, Transaction, TxVersion +from typing import NamedTuple + +from hathor.conf.settings import HathorSettings +from hathor.daa import DifficultyAdjustmentAlgorithm +from hathor.transaction import BaseTransaction, Block, MergeMinedBlock, Transaction, TxVersion from hathor.transaction.exceptions import TxValidationError from hathor.transaction.token_creation_tx import TokenCreationTransaction from hathor.transaction.validation_state import ValidationState -from hathor.verification import block_verification, token_creation_transaction_verification, transaction_verification +from hathor.verification.block_verifier import BlockVerifier +from hathor.verification.merge_mined_block_verifier import MergeMinedBlockVerifier +from hathor.verification.token_creation_transaction_verifier import TokenCreationTransactionVerifier +from hathor.verification.transaction_verifier import TransactionVerifier + + +class VertexVerifiers(NamedTuple): + block: BlockVerifier + merge_mined_block: MergeMinedBlockVerifier + tx: TransactionVerifier + token_creation_tx: TokenCreationTransactionVerifier + + @classmethod + def create(cls, *, settings: HathorSettings, daa: DifficultyAdjustmentAlgorithm) -> 'VertexVerifiers': + return VertexVerifiers( + block=BlockVerifier(settings=settings, daa=daa), + merge_mined_block=MergeMinedBlockVerifier(settings=settings, daa=daa), + tx=TransactionVerifier(settings=settings, daa=daa), + token_creation_tx=TokenCreationTransactionVerifier(settings=settings, daa=daa), + ) class VerificationService: - __slots__ = () + __slots__ = ('verifiers', ) + + def __init__(self, *, verifiers: VertexVerifiers) -> None: + self.verifiers = verifiers def validate_basic(self, vertex: BaseTransaction, *, skip_block_weight_verification: bool = False) -> bool: """ Run basic validations (all that are possible without dependencies) and update the validation state. 
@@ -70,12 +96,24 @@ def verify_basic(self, vertex: BaseTransaction, *, skip_block_weight_verificatio Used by `self.validate_basic`. Should not modify the validation state.""" match vertex.version: - case TxVersion.REGULAR_BLOCK | TxVersion.MERGE_MINED_BLOCK: + case TxVersion.REGULAR_BLOCK: assert isinstance(vertex, Block) - block_verification.verify_basic(vertex, skip_block_weight_verification=skip_block_weight_verification) - case TxVersion.REGULAR_TRANSACTION | TxVersion.TOKEN_CREATION_TRANSACTION: + self.verifiers.block.verify_basic( + vertex, + skip_block_weight_verification=skip_block_weight_verification + ) + case TxVersion.MERGE_MINED_BLOCK: + assert isinstance(vertex, MergeMinedBlock) + self.verifiers.merge_mined_block.verify_basic( + vertex, + skip_block_weight_verification=skip_block_weight_verification + ) + case TxVersion.REGULAR_TRANSACTION: assert isinstance(vertex, Transaction) - transaction_verification.verify_basic(vertex) + self.verifiers.tx.verify_basic(vertex) + case TxVersion.TOKEN_CREATION_TRANSACTION: + assert isinstance(vertex, TokenCreationTransaction) + self.verifiers.token_creation_tx.verify_basic(vertex) case _: raise NotImplementedError @@ -84,15 +122,35 @@ def verify(self, vertex: BaseTransaction, *, reject_locked_reward: bool = True) Used by `self.validate_full`. 
Should not modify the validation state.""" match vertex.version: - case TxVersion.REGULAR_BLOCK | TxVersion.MERGE_MINED_BLOCK: + case TxVersion.REGULAR_BLOCK: + assert isinstance(vertex, Block) + self.verifiers.block.verify(vertex) + case TxVersion.MERGE_MINED_BLOCK: + assert isinstance(vertex, MergeMinedBlock) + self.verifiers.merge_mined_block.verify(vertex) + case TxVersion.REGULAR_TRANSACTION: + assert isinstance(vertex, Transaction) + self.verifiers.tx.verify(vertex, reject_locked_reward=reject_locked_reward) + case TxVersion.TOKEN_CREATION_TRANSACTION: + assert isinstance(vertex, TokenCreationTransaction) + self.verifiers.token_creation_tx.verify(vertex, reject_locked_reward=reject_locked_reward) + case _: + raise NotImplementedError + + def verify_without_storage(self, vertex: BaseTransaction) -> None: + match vertex.version: + case TxVersion.REGULAR_BLOCK: assert isinstance(vertex, Block) - block_verification.verify(vertex) + self.verifiers.block.verify_without_storage(vertex) + case TxVersion.MERGE_MINED_BLOCK: + assert isinstance(vertex, MergeMinedBlock) + self.verifiers.merge_mined_block.verify_without_storage(vertex) case TxVersion.REGULAR_TRANSACTION: assert isinstance(vertex, Transaction) - transaction_verification.verify(vertex, reject_locked_reward=reject_locked_reward) + self.verifiers.tx.verify_without_storage(vertex) case TxVersion.TOKEN_CREATION_TRANSACTION: assert isinstance(vertex, TokenCreationTransaction) - token_creation_transaction_verification.verify(vertex, reject_locked_reward=reject_locked_reward) + self.verifiers.token_creation_tx.verify_without_storage(vertex) case _: raise NotImplementedError diff --git a/hathor/verification/vertex_verifier.py b/hathor/verification/vertex_verifier.py new file mode 100644 index 000000000..0834a0671 --- /dev/null +++ b/hathor/verification/vertex_verifier.py @@ -0,0 +1,179 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file 
except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Optional + +from hathor.conf.settings import HathorSettings +from hathor.daa import DifficultyAdjustmentAlgorithm +from hathor.transaction import BaseTransaction +from hathor.transaction.exceptions import ( + DuplicatedParents, + IncorrectParents, + InvalidOutputScriptSize, + InvalidOutputValue, + InvalidToken, + ParentDoesNotExist, + PowError, + TimestampError, + TooManyOutputs, + TooManySigOps, +) + +# tx should have 2 parents, both other transactions +_TX_PARENTS_TXS = 2 +_TX_PARENTS_BLOCKS = 0 + +# blocks have 3 parents, 2 txs and 1 block +_BLOCK_PARENTS_TXS = 2 +_BLOCK_PARENTS_BLOCKS = 1 + + +class VertexVerifier: + __slots__ = ('_settings', '_daa') + + def __init__(self, *, settings: HathorSettings, daa: DifficultyAdjustmentAlgorithm): + self._settings = settings + self._daa = daa + + def verify_parents(self, vertex: BaseTransaction) -> None: + """All parents must exist and their timestamps must be smaller than ours. + + Also, txs should have 2 other txs as parents, while blocks should have 2 txs + 1 block. + + Parents must be ordered with blocks first, followed by transactions. 
+ + :raises TimestampError: when our timestamp is less or equal than our parent's timestamp + :raises ParentDoesNotExist: when at least one of our parents does not exist + :raises IncorrectParents: when tx does not confirm the correct number/type of parent txs + """ + from hathor.transaction.storage.exceptions import TransactionDoesNotExist + + assert vertex.storage is not None + + # check if parents are duplicated + parents_set = set(vertex.parents) + if len(vertex.parents) > len(parents_set): + raise DuplicatedParents('Tx has duplicated parents: {}', [tx_hash.hex() for tx_hash in vertex.parents]) + + my_parents_txs = 0 # number of tx parents + my_parents_blocks = 0 # number of block parents + min_timestamp: Optional[int] = None + + for parent_hash in vertex.parents: + try: + parent = vertex.storage.get_transaction(parent_hash) + assert parent.hash is not None + if vertex.timestamp <= parent.timestamp: + raise TimestampError('tx={} timestamp={}, parent={} timestamp={}'.format( + vertex.hash_hex, + vertex.timestamp, + parent.hash_hex, + parent.timestamp, + )) + + if parent.is_block: + if vertex.is_block and not parent.is_genesis: + if vertex.timestamp - parent.timestamp > self._settings.MAX_DISTANCE_BETWEEN_BLOCKS: + raise TimestampError('Distance between blocks is too big' + ' ({} seconds)'.format(vertex.timestamp - parent.timestamp)) + if my_parents_txs > 0: + raise IncorrectParents('Parents which are blocks must come before transactions') + for pi_hash in parent.parents: + pi = vertex.storage.get_transaction(pi_hash) + if not pi.is_block: + min_timestamp = ( + min(min_timestamp, pi.timestamp) if min_timestamp is not None + else pi.timestamp + ) + my_parents_blocks += 1 + else: + if min_timestamp and parent.timestamp < min_timestamp: + raise TimestampError('tx={} timestamp={}, parent={} timestamp={}, min_timestamp={}'.format( + vertex.hash_hex, + vertex.timestamp, + parent.hash_hex, + parent.timestamp, + min_timestamp + )) + my_parents_txs += 1 + except
TransactionDoesNotExist: + raise ParentDoesNotExist('tx={} parent={}'.format(vertex.hash_hex, parent_hash.hex())) + + # check for correct number of parents + if vertex.is_block: + parents_txs = _BLOCK_PARENTS_TXS + parents_blocks = _BLOCK_PARENTS_BLOCKS + else: + parents_txs = _TX_PARENTS_TXS + parents_blocks = _TX_PARENTS_BLOCKS + if my_parents_blocks != parents_blocks: + raise IncorrectParents('wrong number of parents (block type): {}, expecting {}'.format( + my_parents_blocks, parents_blocks)) + if my_parents_txs != parents_txs: + raise IncorrectParents('wrong number of parents (tx type): {}, expecting {}'.format( + my_parents_txs, parents_txs)) + + @classmethod + def verify_pow(cls, vertex: BaseTransaction, *, override_weight: Optional[float] = None) -> None: + """Verify proof-of-work + + :raises PowError: when the hash is equal or greater than the target + """ + assert vertex.hash is not None + numeric_hash = int(vertex.hash_hex, vertex.HEX_BASE) + minimum_target = vertex.get_target(override_weight) + if numeric_hash >= minimum_target: + raise PowError(f'Transaction has invalid data ({numeric_hash} < {minimum_target})') + + def verify_outputs(self, vertex: BaseTransaction) -> None: + """Verify there are no hathor authority UTXOs and outputs are all positive + + :raises InvalidToken: when there's a hathor authority utxo + :raises InvalidOutputValue: output has negative value + :raises TooManyOutputs: when there are too many outputs + """ + self.verify_number_of_outputs(vertex) + for index, output in enumerate(vertex.outputs): + # no hathor authority UTXO + if (output.get_token_index() == 0) and output.is_token_authority(): + raise InvalidToken('Cannot have authority UTXO for hathor tokens: {}'.format( + output.to_human_readable())) + + # output value must be positive + if output.value <= 0: + raise InvalidOutputValue('Output value must be a positive integer. 
Value: {} and index: {}'.format( + output.value, index)) + + if len(output.script) > self._settings.MAX_OUTPUT_SCRIPT_SIZE: + raise InvalidOutputScriptSize('size: {} and max-size: {}'.format( + len(output.script), self._settings.MAX_OUTPUT_SCRIPT_SIZE + )) + + def verify_number_of_outputs(self, vertex: BaseTransaction) -> None: + """Verify number of outputs does not exceeds the limit""" + if len(vertex.outputs) > self._settings.MAX_NUM_OUTPUTS: + raise TooManyOutputs('Maximum number of outputs exceeded') + + def verify_sigops_output(self, vertex: BaseTransaction) -> None: + """ Count sig operations on all outputs and verify that the total sum is below the limit + """ + from hathor.transaction.scripts import get_sigops_count + n_txops = 0 + + for tx_output in vertex.outputs: + n_txops += get_sigops_count(tx_output.script) + + if n_txops > self._settings.MAX_TX_SIGOPS_OUTPUT: + raise TooManySigOps('TX[{}]: Maximum number of sigops for all outputs exceeded ({})'.format( + vertex.hash_hex, n_txops)) diff --git a/hathor/wallet/resources/nano_contracts/execute.py b/hathor/wallet/resources/nano_contracts/execute.py index d08bd4c5a..46cce3ad6 100644 --- a/hathor/wallet/resources/nano_contracts/execute.py +++ b/hathor/wallet/resources/nano_contracts/execute.py @@ -20,7 +20,6 @@ from hathor.api_util import Resource, get_missing_params_msg, render_options, set_cors from hathor.cli.openapi_files.register import register_resource from hathor.crypto.util import decode_address -from hathor.daa import minimum_tx_weight from hathor.transaction import Transaction, TxInput, TxOutput from hathor.transaction.scripts import P2PKH, NanoContractMatchValues from hathor.util import json_dumpb, json_loadb @@ -99,7 +98,7 @@ def render_POST(self, request): tx.parents = self.manager.get_new_tx_parents() tx.update_timestamp(int(self.manager.reactor.seconds())) - tx.weight = minimum_tx_weight(tx) + tx.weight = self.manager.daa.minimum_tx_weight(tx) tx.resolve() success = 
self.manager.propagate_tx(tx) diff --git a/hathor/wallet/resources/send_tokens.py b/hathor/wallet/resources/send_tokens.py index cf14f11fd..268292c12 100644 --- a/hathor/wallet/resources/send_tokens.py +++ b/hathor/wallet/resources/send_tokens.py @@ -20,7 +20,6 @@ from hathor.api_util import Resource, render_options, set_cors from hathor.cli.openapi_files.register import register_resource from hathor.crypto.util import decode_address -from hathor.daa import minimum_tx_weight from hathor.exception import InvalidNewTransaction from hathor.transaction import Transaction from hathor.transaction.exceptions import TxValidationError @@ -125,7 +124,7 @@ def _render_POST_thread(self, values: dict[str, Any], request: Request) -> Union tx.parents = values['parents'] weight = values['weight'] if weight is None: - weight = minimum_tx_weight(tx) + weight = self.manager.daa.minimum_tx_weight(tx) tx.weight = weight tx.resolve() self.manager.verification_service.verify(tx) diff --git a/hathor/wallet/resources/sign_tx.py b/hathor/wallet/resources/sign_tx.py index cef27a689..b70ccb76b 100644 --- a/hathor/wallet/resources/sign_tx.py +++ b/hathor/wallet/resources/sign_tx.py @@ -17,7 +17,6 @@ from hathor.api_util import Resource, get_args, get_missing_params_msg, set_cors from hathor.cli.openapi_files.register import register_resource -from hathor.daa import minimum_tx_weight from hathor.transaction import Transaction from hathor.util import json_dumpb @@ -67,7 +66,7 @@ def render_GET(self, request): if prepare_to_send: tx.parents = self.manager.get_new_tx_parents() tx.update_timestamp(int(self.manager.reactor.seconds())) - tx.weight = minimum_tx_weight(tx) + tx.weight = self.manager.daa.minimum_tx_weight(tx) tx.resolve() data = {'hex_tx': tx.get_struct().hex(), 'success': True} diff --git a/tests/p2p/test_get_best_blockchain.py b/tests/p2p/test_get_best_blockchain.py index a37e4a742..806444be0 100644 --- a/tests/p2p/test_get_best_blockchain.py +++ 
b/tests/p2p/test_get_best_blockchain.py @@ -2,7 +2,6 @@ from hathor.conf import HathorSettings from hathor.indexes.height_index import HeightInfo -from hathor.manager import DEFAULT_CAPABILITIES from hathor.p2p.messages import ProtocolMessages from hathor.p2p.resources import StatusResource from hathor.p2p.states import ReadyState @@ -229,7 +228,8 @@ def test_node_without_get_best_blockchain_capability(self): protocol2 = connected_peers1[0] self.assertTrue(protocol2.capabilities.issuperset(set(cababilities_without_get_best_blockchain))) protocol1 = connected_peers2[0] - self.assertTrue(protocol1.capabilities.issuperset(set(DEFAULT_CAPABILITIES))) + default_capabilities = manager2.get_default_capabilities() + self.assertTrue(protocol1.capabilities.issuperset(set(default_capabilities))) # assert the peers don't engage in get_best_blockchain messages state2 = protocol2.state diff --git a/tests/resources/wallet/test_thin_wallet.py b/tests/resources/wallet/test_thin_wallet.py index 033a2050f..5fb854f84 100644 --- a/tests/resources/wallet/test_thin_wallet.py +++ b/tests/resources/wallet/test_thin_wallet.py @@ -4,7 +4,6 @@ from hathor.conf import HathorSettings from hathor.crypto.util import decode_address -from hathor.daa import minimum_tx_weight from hathor.transaction import Transaction, TxInput, TxOutput, genesis from hathor.transaction.scripts import P2PKH, create_output_script, parse_address_script from hathor.wallet.resources.thin_wallet import ( @@ -85,7 +84,7 @@ def test_post(self): i.data = P2PKH.create_input_data(public_key_bytes, signature_bytes) tx2.inputs = [i] tx2.timestamp = int(self.clock.seconds()) - tx2.weight = minimum_tx_weight(tx2) + tx2.weight = self.manager.daa.minimum_tx_weight(tx2) response_wrong_amount = yield self.web.post('thin_wallet/send_tokens', {'tx_hex': tx2.get_struct().hex()}) data_wrong_amount = response_wrong_amount.json_value() @@ -100,7 +99,7 @@ def test_post(self): i.data = P2PKH.create_input_data(public_key_bytes, signature_bytes) 
tx3.inputs = [i] tx3.timestamp = int(self.clock.seconds()) - tx3.weight = minimum_tx_weight(tx3) + tx3.weight = self.manager.daa.minimum_tx_weight(tx3) # Then send tokens response = yield self.web.post('thin_wallet/send_tokens', {'tx_hex': tx3.get_struct().hex()}) @@ -423,7 +422,7 @@ def test_token_history(self): i.data = P2PKH.create_input_data(public_key_bytes, signature_bytes) tx2.inputs = [i] tx2.timestamp = int(self.clock.seconds()) - tx2.weight = minimum_tx_weight(tx2) + tx2.weight = self.manager.daa.minimum_tx_weight(tx2) tx2.parents = self.manager.get_new_tx_parents() tx2.resolve() self.manager.propagate_tx(tx2) diff --git a/tests/simulation/test_simulator.py b/tests/simulation/test_simulator.py index cce6c795b..a373af657 100644 --- a/tests/simulation/test_simulator.py +++ b/tests/simulation/test_simulator.py @@ -2,6 +2,7 @@ from hathor.simulator import FakeConnection from hathor.simulator.trigger import All as AllTriggers, StopWhenSynced +from hathor.verification.vertex_verifier import VertexVerifier from tests import unittest from tests.simulation.base import SimulatorTestCase @@ -12,7 +13,7 @@ def test_verify_pow(self): # just get one of the genesis, we don't really need to create any transaction tx = next(iter(manager1.tx_storage.get_all_genesis())) # optional argument must be valid, it just has to not raise any exception, there's no assert for that - tx.verify_pow(0.) + VertexVerifier.verify_pow(tx, override_weight=0.) 
def test_one_node(self): manager1 = self.create_peer() diff --git a/tests/tx/test_blockchain.py b/tests/tx/test_blockchain.py index d808975e9..f52228c60 100644 --- a/tests/tx/test_blockchain.py +++ b/tests/tx/test_blockchain.py @@ -1,7 +1,7 @@ from itertools import chain from hathor.conf import HathorSettings -from hathor.daa import TestMode, _set_test_mode, get_weight_decay_amount +from hathor.daa import DifficultyAdjustmentAlgorithm, TestMode, _set_test_mode from hathor.transaction import sum_weights from hathor.transaction.storage import TransactionMemoryStorage from tests import unittest @@ -30,6 +30,7 @@ def setUp(self): self.genesis = self.tx_storage.get_all_genesis() self.genesis_blocks = [tx for tx in self.genesis if tx.is_block] self.genesis_txs = [tx for tx in self.genesis if not tx.is_block] + self.daa = DifficultyAdjustmentAlgorithm(settings=settings) def test_single_chain(self): """ All new blocks belong to case (i). @@ -420,42 +421,30 @@ def test_daa_weight_decay_amount(self): amount = settings.WEIGHT_DECAY_AMOUNT for distance in range(0, settings.WEIGHT_DECAY_ACTIVATE_DISTANCE, 10): - self.assertEqual(get_weight_decay_amount(distance), 0) + self.assertEqual(self.daa.get_weight_decay_amount(distance), 0) distance = settings.WEIGHT_DECAY_ACTIVATE_DISTANCE - 1 - self.assertAlmostEqual(get_weight_decay_amount(distance), 0) + self.assertAlmostEqual(self.daa.get_weight_decay_amount(distance), 0) distance = settings.WEIGHT_DECAY_ACTIVATE_DISTANCE for k in range(1, 11): for _ in range(settings.WEIGHT_DECAY_WINDOW_SIZE): - self.assertAlmostEqual(get_weight_decay_amount(distance), k * amount) + self.assertAlmostEqual(self.daa.get_weight_decay_amount(distance), k * amount) distance += 1 - self.assertAlmostEqual(get_weight_decay_amount(distance), 11 * amount) + self.assertAlmostEqual(self.daa.get_weight_decay_amount(distance), 11 * amount) def test_daa_weight_decay_blocks(self): - from hathor import daa - orig_avg_time_between_blocks = 
daa.AVG_TIME_BETWEEN_BLOCKS - orig_min_block_weight = daa.MIN_BLOCK_WEIGHT - - try: - self._test_daa_weight_decay_blocks() - finally: - daa.AVG_TIME_BETWEEN_BLOCKS = orig_avg_time_between_blocks - daa.MIN_BLOCK_WEIGHT = orig_min_block_weight - - def _test_daa_weight_decay_blocks(self): _set_test_mode(TestMode.DISABLED) manager = self.create_peer('testnet', tx_storage=self.tx_storage) amount = settings.WEIGHT_DECAY_AMOUNT - from hathor import daa - daa.AVG_TIME_BETWEEN_BLOCKS = settings.AVG_TIME_BETWEEN_BLOCKS - daa.MIN_BLOCK_WEIGHT = 2 + 2 * settings.WEIGHT_DECAY_AMOUNT + manager.daa.AVG_TIME_BETWEEN_BLOCKS = settings.AVG_TIME_BETWEEN_BLOCKS + manager.daa.MIN_BLOCK_WEIGHT = 2 + 2 * settings.WEIGHT_DECAY_AMOUNT add_new_blocks(manager, 2 * settings.BLOCK_DIFFICULTY_N_BLOCKS, advance_clock=settings.AVG_TIME_BETWEEN_BLOCKS) - daa.MIN_BLOCK_WEIGHT = 1 + manager.daa.MIN_BLOCK_WEIGHT = 1 base_weight = manager.generate_mining_block().weight - self.assertGreater(base_weight, daa.MIN_BLOCK_WEIGHT) + self.assertGreater(base_weight, manager.daa.MIN_BLOCK_WEIGHT) add_new_blocks(manager, 20, advance_clock=settings.AVG_TIME_BETWEEN_BLOCKS) @@ -482,7 +471,7 @@ def _test_daa_weight_decay_blocks(self): manager.reactor.advance(1) weight = manager.generate_mining_block().weight - self.assertAlmostEqual(weight, daa.MIN_BLOCK_WEIGHT) + self.assertAlmostEqual(weight, manager.daa.MIN_BLOCK_WEIGHT) class SyncV1BlockchainTestCase(unittest.SyncV1Params, BaseBlockchainTestCase): diff --git a/tests/tx/test_genesis.py b/tests/tx/test_genesis.py index a30759193..d1e8b9d13 100644 --- a/tests/tx/test_genesis.py +++ b/tests/tx/test_genesis.py @@ -1,6 +1,8 @@ from hathor.conf import HathorSettings -from hathor.daa import TestMode, _set_test_mode, calculate_block_difficulty, minimum_tx_weight +from hathor.daa import DifficultyAdjustmentAlgorithm, TestMode, _set_test_mode from hathor.transaction.storage import TransactionMemoryStorage +from hathor.verification.verification_service import 
VerificationService, VertexVerifiers +from hathor.verification.vertex_verifier import VertexVerifier from tests import unittest settings = HathorSettings() @@ -26,18 +28,21 @@ def get_genesis_output(): class GenesisTest(unittest.TestCase): def setUp(self): super().setUp() + self._daa = DifficultyAdjustmentAlgorithm(settings=self._settings) + verifiers = VertexVerifiers.create(settings=self._settings, daa=self._daa) + self._verification_service = VerificationService(verifiers=verifiers) self.storage = TransactionMemoryStorage() def test_pow(self): genesis = self.storage.get_all_genesis() for g in genesis: self.assertEqual(g.calculate_hash(), g.hash) - self.assertIsNone(g.verify_pow()) + self.assertIsNone(VertexVerifier.verify_pow(g)) def test_verify(self): genesis = self.storage.get_all_genesis() for g in genesis: - g.verify_without_storage() + self._verification_service.verify_without_storage(g) def test_output(self): # Test if block output is valid @@ -65,9 +70,9 @@ def test_genesis_weight(self): # Validate the block and tx weight # in test mode weight is always 1 _set_test_mode(TestMode.TEST_ALL_WEIGHT) - self.assertEqual(calculate_block_difficulty(genesis_block), 1) - self.assertEqual(minimum_tx_weight(genesis_tx), 1) + self.assertEqual(self._daa.calculate_block_difficulty(genesis_block), 1) + self.assertEqual(self._daa.minimum_tx_weight(genesis_tx), 1) _set_test_mode(TestMode.DISABLED) - self.assertEqual(calculate_block_difficulty(genesis_block), genesis_block.weight) - self.assertEqual(minimum_tx_weight(genesis_tx), genesis_tx.weight) + self.assertEqual(self._daa.calculate_block_difficulty(genesis_block), genesis_block.weight) + self.assertEqual(self._daa.minimum_tx_weight(genesis_tx), genesis_tx.weight) diff --git a/tests/tx/test_tx.py b/tests/tx/test_tx.py index 96cc51ce2..d245361ea 100644 --- a/tests/tx/test_tx.py +++ b/tests/tx/test_tx.py @@ -2,7 +2,6 @@ import hashlib from math import isinf, isnan -from hathor import daa from hathor.crypto.util import 
decode_address, get_address_from_public_key, get_private_key_from_bytes from hathor.daa import TestMode, _set_test_mode from hathor.transaction import MAX_OUTPUT_VALUE, Block, Transaction, TxInput, TxOutput @@ -30,6 +29,7 @@ from hathor.transaction.scripts import P2PKH, parse_address_script from hathor.transaction.util import int_to_bytes from hathor.transaction.validation_state import ValidationState +from hathor.verification.verification_service import VertexVerifiers from hathor.wallet import Wallet from tests import unittest from tests.utils import ( @@ -51,6 +51,7 @@ def setUp(self): # this makes sure we can spend the genesis outputs self.manager = self.create_peer('testnet', unlock_wallet=True, wallet_index=True, use_memory_storage=True) self.tx_storage = self.manager.tx_storage + self._verifiers = VertexVerifiers.create(settings=self._settings, daa=self.manager.daa) # read genesis keys self.genesis_private_key = get_genesis_key() @@ -80,7 +81,7 @@ def test_input_output_match(self): _input.data = P2PKH.create_input_data(public_bytes, signature) with self.assertRaises(InputOutputMismatch): - tx.verify_sum() + self._verifiers.tx.verify_sum(tx) def test_validation(self): # add 100 blocks and check that walking through get_next_block_best_chain yields the same blocks @@ -120,7 +121,7 @@ def test_script(self): _input.data = data_wrong with self.assertRaises(InvalidInputData): - tx.verify_inputs() + self._verifiers.tx.verify_inputs(tx) def test_too_many_inputs(self): random_bytes = bytes.fromhex('0000184e64683b966b4268f387c269915cc61f6af5329823a93e3696cb0fe902') @@ -131,13 +132,13 @@ def test_too_many_inputs(self): tx = Transaction(inputs=inputs, storage=self.tx_storage) with self.assertRaises(TooManyInputs): - tx.verify_number_of_inputs() + self._verifiers.tx.verify_number_of_inputs(tx) def test_no_inputs(self): tx = Transaction(inputs=[], storage=self.tx_storage) with self.assertRaises(NoInputError): - tx.verify_number_of_inputs() + 
self._verifiers.tx.verify_number_of_inputs(tx) def test_too_many_outputs(self): random_bytes = bytes.fromhex('0000184e64683b966b4268f387c269915cc61f6af5329823a93e3696cb0fe902') @@ -148,7 +149,7 @@ def test_too_many_outputs(self): tx = Transaction(outputs=outputs, storage=self.tx_storage) with self.assertRaises(TooManyOutputs): - tx.verify_number_of_outputs() + self._verifiers.tx.verify_number_of_outputs(tx) def _gen_tx_spending_genesis_block(self): parents = [tx.hash for tx in self.genesis_txs] @@ -246,11 +247,11 @@ def test_merge_mined_no_magic(self): ) with self.assertRaises(AuxPowNoMagicError): - b.verify_aux_pow() + self._verifiers.merge_mined_block.verify_aux_pow(b) # adding the MAGIC_NUMBER makes it work: b.aux_pow = b.aux_pow._replace(coinbase_head=b.aux_pow.coinbase_head + MAGIC_NUMBER) - b.verify_aux_pow() + self._verifiers.merge_mined_block.verify_aux_pow(b) def test_merge_mined_multiple_magic(self): from hathor.merged_mining import MAGIC_NUMBER @@ -312,9 +313,9 @@ def test_merge_mined_multiple_magic(self): assert bytes(b1) != bytes(b2) assert b1.calculate_hash() == b2.calculate_hash() - b1.verify_aux_pow() # OK + self._verifiers.merge_mined_block.verify_aux_pow(b1) # OK with self.assertRaises(AuxPowUnexpectedMagicError): - b2.verify_aux_pow() + self._verifiers.merge_mined_block.verify_aux_pow(b2) def test_merge_mined_long_merkle_path(self): from hathor.merged_mining import MAGIC_NUMBER @@ -341,11 +342,11 @@ def test_merge_mined_long_merkle_path(self): ) with self.assertRaises(AuxPowLongMerklePathError): - b.verify_aux_pow() + self._verifiers.merge_mined_block.verify_aux_pow(b) # removing one path makes it work b.aux_pow.merkle_path.pop() - b.verify_aux_pow() + self._verifiers.merge_mined_block.verify_aux_pow(b) def test_block_outputs(self): from hathor.transaction.exceptions import TooManyOutputs @@ -365,7 +366,7 @@ def test_block_outputs(self): storage=self.tx_storage) with self.assertRaises(TooManyOutputs): - block.verify_outputs() + 
self._verifiers.block.verify_outputs(block) def test_tx_number_parents(self): genesis_block = self.genesis_blocks[0] @@ -530,11 +531,11 @@ def test_tx_weight_too_high(self): inputs = [TxInput(b'', 0, b'')] tx = Transaction(weight=1, inputs=inputs, outputs=outputs, parents=parents, storage=self.tx_storage, timestamp=self.last_block.timestamp + 1) - tx.weight = daa.minimum_tx_weight(tx) + tx.weight = self.manager.daa.minimum_tx_weight(tx) tx.weight += self._settings.MAX_TX_WEIGHT_DIFF + 0.1 tx.update_hash() with self.assertRaises(WeightError): - tx.verify_weight() + self._verifiers.tx.verify_weight(tx) def test_weight_nan(self): # this should succeed @@ -682,34 +683,34 @@ def test_tx_methods(self): self.assertFalse(tx_equal.is_genesis) # Pow error - tx2.verify_pow() + self._verifiers.tx.verify_pow(tx2) tx2.weight = 100 with self.assertRaises(PowError): - tx2.verify_pow() + self._verifiers.tx.verify_pow(tx2) # Verify parent timestamps - tx2.verify_parents() + self._verifiers.tx.verify_parents(tx2) tx2_timestamp = tx2.timestamp tx2.timestamp = 2 with self.assertRaises(TimestampError): - tx2.verify_parents() + self._verifiers.tx.verify_parents(tx2) tx2.timestamp = tx2_timestamp # Verify inputs timestamps - tx2.verify_inputs() + self._verifiers.tx.verify_inputs(tx2) tx2.timestamp = 2 with self.assertRaises(TimestampError): - tx2.verify_inputs() + self._verifiers.tx.verify_inputs(tx2) tx2.timestamp = tx2_timestamp # Validate maximum distance between blocks block = blocks[0] block2 = blocks[1] block2.timestamp = block.timestamp + self._settings.MAX_DISTANCE_BETWEEN_BLOCKS - block2.verify_parents() + self._verifiers.block.verify_parents(block2) block2.timestamp += 1 with self.assertRaises(TimestampError): - block2.verify_parents() + self._verifiers.block.verify_parents(block2) def test_block_big_nonce(self): block = self.genesis_blocks[0] @@ -886,7 +887,7 @@ def _test_txout_script_limit(self, offset): _output = TxOutput(value, script) tx = Transaction(inputs=[_input], 
outputs=[_output], storage=self.tx_storage) - tx.verify_outputs() + self._verifiers.tx.verify_outputs(tx) def test_txout_script_limit_exceeded(self): with self.assertRaises(InvalidOutputScriptSize): @@ -910,7 +911,7 @@ def _test_txin_data_limit(self, offset): outputs=[_output], storage=self.tx_storage ) - tx.verify_inputs(skip_script=True) + self._verifiers.tx.verify_inputs(tx, skip_script=True) def test_txin_data_limit_exceeded(self): with self.assertRaises(InvalidInputDataSize): @@ -1063,7 +1064,7 @@ def test_sigops_output_single_below_limit(self) -> None: output3 = TxOutput(value, hscript) tx = Transaction(inputs=[_input], outputs=[output3], storage=self.tx_storage) tx.update_hash() - tx.verify_sigops_output() + self._verifiers.tx.verify_sigops_output(tx) def test_sigops_output_multi_below_limit(self) -> None: genesis_block = self.genesis_blocks[0] @@ -1075,7 +1076,7 @@ def test_sigops_output_multi_below_limit(self) -> None: output4 = TxOutput(value, hscript) tx = Transaction(inputs=[_input], outputs=[output4]*num_outputs, storage=self.tx_storage) tx.update_hash() - tx.verify_sigops_output() + self._verifiers.tx.verify_sigops_output(tx) def test_sigops_input_single_above_limit(self) -> None: genesis_block = self.genesis_blocks[0] @@ -1117,7 +1118,7 @@ def test_sigops_input_single_below_limit(self) -> None: input3 = TxInput(genesis_block.hash, 0, hscript) tx = Transaction(inputs=[input3], outputs=[_output], storage=self.tx_storage) tx.update_hash() - tx.verify_sigops_input() + self._verifiers.tx.verify_sigops_input(tx) def test_sigops_input_multi_below_limit(self) -> None: genesis_block = self.genesis_blocks[0] @@ -1131,7 +1132,7 @@ def test_sigops_input_multi_below_limit(self) -> None: input4 = TxInput(genesis_block.hash, 0, hscript) tx = Transaction(inputs=[input4]*num_inputs, outputs=[_output], storage=self.tx_storage) tx.update_hash() - tx.verify_sigops_input() + self._verifiers.tx.verify_sigops_input(tx) def test_compare_bytes_equal(self) -> None: # create 
some block diff --git a/tests/tx/test_tx_deserialization.py b/tests/tx/test_tx_deserialization.py index 7e15598f3..8469b9edb 100644 --- a/tests/tx/test_tx_deserialization.py +++ b/tests/tx/test_tx_deserialization.py @@ -1,10 +1,18 @@ +from hathor.daa import DifficultyAdjustmentAlgorithm from hathor.transaction import Block, MergeMinedBlock, Transaction, TxVersion from hathor.transaction.token_creation_tx import TokenCreationTransaction +from hathor.verification.verification_service import VerificationService, VertexVerifiers from tests import unittest class _BaseTest: class _DeserializationTest(unittest.TestCase): + def setUp(self) -> None: + super().setUp() + daa = DifficultyAdjustmentAlgorithm(settings=self._settings) + verifiers = VertexVerifiers.create(settings=self._settings, daa=daa) + self._verification_service = VerificationService(verifiers=verifiers) + def test_deserialize(self): cls = self.get_tx_class() tx = cls.create_from_struct(self.tx_bytes) @@ -18,7 +26,7 @@ def verbose(key, value): cls = self.get_tx_class() tx = cls.create_from_struct(self.tx_bytes, verbose=verbose) - tx.verify_without_storage() + self._verification_service.verify_without_storage(tx) key, version = v[1] self.assertEqual(key, 'version') diff --git a/tests/unittest.py b/tests/unittest.py index e9ccfdbb3..881c613e6 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -183,7 +183,7 @@ def create_peer_from_builder(self, builder, start_manager=True): def create_peer(self, network, peer_id=None, wallet=None, tx_storage=None, unlock_wallet=True, wallet_index=False, capabilities=None, full_verification=True, enable_sync_v1=None, enable_sync_v2=None, checkpoints=None, utxo_index=False, event_manager=None, use_memory_index=None, start_manager=True, - pubsub=None, event_storage=None, enable_event_queue=None, use_memory_storage=None): + pubsub=None, event_storage=None, enable_event_queue=None, use_memory_storage=None, daa=None): enable_sync_v1, enable_sync_v2 = 
self._syncVersionFlags(enable_sync_v1, enable_sync_v2) @@ -246,6 +246,9 @@ def create_peer(self, network, peer_id=None, wallet=None, tx_storage=None, unloc if utxo_index: builder.enable_utxo_index() + if daa: + builder.set_daa(daa) + manager = self.create_peer_from_builder(builder, start_manager=start_manager) # XXX: just making sure that tests set this up correctly diff --git a/tests/wallet/test_wallet_hd.py b/tests/wallet/test_wallet_hd.py index 5c18648cb..fe0676630 100644 --- a/tests/wallet/test_wallet_hd.py +++ b/tests/wallet/test_wallet_hd.py @@ -1,6 +1,7 @@ from hathor.conf import HathorSettings from hathor.crypto.util import decode_address from hathor.transaction import Transaction +from hathor.verification.transaction_verifier import TransactionVerifier from hathor.wallet import HDWallet from hathor.wallet.base_wallet import WalletBalance, WalletInputInfo, WalletOutputInfo from hathor.wallet.exceptions import InsufficientFunds @@ -42,7 +43,7 @@ def test_transaction_and_balance(self): out = WalletOutputInfo(decode_address(new_address2), self.TOKENS, timelock=None) tx1 = self.wallet.prepare_transaction_compute_inputs(Transaction, [out], self.tx_storage) tx1.update_hash() - tx1.verify_script(tx1.inputs[0], block) + TransactionVerifier.verify_script(tx=tx1, input_tx=tx1.inputs[0], spent_tx=block) tx1.storage = self.tx_storage tx1.get_metadata().validation = ValidationState.FULL self.wallet.on_new_tx(tx1) @@ -62,7 +63,7 @@ def test_transaction_and_balance(self): tx2.storage = self.tx_storage tx2.update_hash() tx2.storage = self.tx_storage - tx2.verify_script(tx2.inputs[0], tx1) + TransactionVerifier.verify_script(tx=tx2, input_tx=tx2.inputs[0], spent_tx=tx1) tx2.get_metadata().validation = ValidationState.FULL self.tx_storage.save_transaction(tx2) self.wallet.on_new_tx(tx2)