From da50f5358a75b5d978f9ad2237724a32510dec69 Mon Sep 17 00:00:00 2001 From: Gabriel Levcovitz Date: Mon, 25 Sep 2023 19:09:07 -0300 Subject: [PATCH] refactor(verification): move verification methods --- hathor/builder/builder.py | 18 +- hathor/builder/cli_builder.py | 5 +- hathor/cli/mining.py | 7 +- hathor/simulator/simulator.py | 24 +- hathor/simulator/verification.py | 54 +++ hathor/stratum/stratum.py | 5 +- hathor/transaction/base_transaction.py | 153 +-------- hathor/transaction/block.py | 60 +--- hathor/transaction/merge_mined_block.py | 10 - hathor/transaction/resources/create_tx.py | 2 +- hathor/transaction/token_creation_tx.py | 44 +-- hathor/transaction/transaction.py | 245 +------------- hathor/verification/block_verification.py | 47 --- hathor/verification/block_verifier.py | 112 +++++++ .../merge_mined_block_verifier.py | 32 ++ ...token_creation_transaction_verification.py | 25 -- .../token_creation_transaction_verifier.py | 71 ++++ .../verification/transaction_verification.py | 53 --- hathor/verification/transaction_verifier.py | 308 ++++++++++++++++++ hathor/verification/verification_service.py | 79 ++++- hathor/verification/vertex_verifier.py | 177 ++++++++++ tests/simulation/test_simulator.py | 3 +- tests/tx/test_genesis.py | 8 +- tests/tx/test_tx.py | 56 ++-- tests/tx/test_tx_deserialization.py | 8 +- tests/wallet/test_wallet_hd.py | 5 +- 26 files changed, 918 insertions(+), 693 deletions(-) create mode 100644 hathor/simulator/verification.py delete mode 100644 hathor/verification/block_verification.py create mode 100644 hathor/verification/block_verifier.py create mode 100644 hathor/verification/merge_mined_block_verifier.py delete mode 100644 hathor/verification/token_creation_transaction_verification.py create mode 100644 hathor/verification/token_creation_transaction_verifier.py delete mode 100644 hathor/verification/transaction_verification.py create mode 100644 hathor/verification/transaction_verifier.py create mode 100644 
hathor/verification/vertex_verifier.py diff --git a/hathor/builder/builder.py b/hathor/builder/builder.py index ecb2bd02c..c67d20d63 100644 --- a/hathor/builder/builder.py +++ b/hathor/builder/builder.py @@ -41,7 +41,7 @@ TransactionStorage, ) from hathor.util import Random, Reactor, get_environment_info -from hathor.verification.verification_service import VerificationService +from hathor.verification.verification_service import VerificationService, VertexVerifiers from hathor.wallet import BaseWallet, Wallet logger = get_logger() @@ -102,6 +102,7 @@ def __init__(self) -> None: self._feature_service: Optional[FeatureService] = None self._bit_signaling_service: Optional[BitSignalingService] = None + self._vertex_verifiers: Optional[VertexVerifiers] = None self._verification_service: Optional[VerificationService] = None self._rocksdb_path: Optional[str] = None @@ -432,10 +433,18 @@ def _get_or_create_bit_signaling_service(self, tx_storage: TransactionStorage) - def _get_or_create_verification_service(self) -> VerificationService: if self._verification_service is None: - self._verification_service = VerificationService() + verifiers = self._get_or_create_vertex_verifiers() + self._verification_service = VerificationService(verifiers=verifiers) return self._verification_service + def _get_or_create_vertex_verifiers(self) -> VertexVerifiers: + if self._vertex_verifiers is None: + settings = self._get_or_create_settings() + self._vertex_verifiers = VertexVerifiers.create(settings=settings) + + return self._vertex_verifiers + def use_memory(self) -> 'Builder': self.check_if_can_modify() self._storage_type = StorageType.MEMORY @@ -533,6 +542,11 @@ def set_verification_service(self, verification_service: VerificationService) -> self._verification_service = verification_service return self + def set_vertex_verifiers(self, vertex_verifiers: VertexVerifiers) -> 'Builder': + self.check_if_can_modify() + self._vertex_verifiers = vertex_verifiers + return self + def 
set_reactor(self, reactor: Reactor) -> 'Builder': self.check_if_can_modify() self._reactor = reactor diff --git a/hathor/builder/cli_builder.py b/hathor/builder/cli_builder.py index d49928f26..441c95855 100644 --- a/hathor/builder/cli_builder.py +++ b/hathor/builder/cli_builder.py @@ -35,7 +35,7 @@ from hathor.pubsub import PubSubManager from hathor.stratum import StratumFactory from hathor.util import Random, Reactor -from hathor.verification.verification_service import VerificationService +from hathor.verification.verification_service import VerificationService, VertexVerifiers from hathor.wallet import BaseWallet, HDWallet, Wallet logger = get_logger() @@ -202,7 +202,8 @@ def create_manager(self, reactor: Reactor) -> HathorManager: not_support_features=self._args.signal_not_support ) - verification_service = VerificationService() + vertex_verifiers = VertexVerifiers.create(settings=settings) + verification_service = VerificationService(verifiers=vertex_verifiers) p2p_manager = ConnectionsManager( reactor, diff --git a/hathor/cli/mining.py b/hathor/cli/mining.py index 9a373be90..df99b2913 100644 --- a/hathor/cli/mining.py +++ b/hathor/cli/mining.py @@ -24,6 +24,9 @@ import requests +from hathor.conf.get_settings import get_settings +from hathor.verification.block_verifier import BlockVerifier + _SLEEP_ON_ERROR_SECONDS = 5 _MAX_CONN_RETRIES = math.inf @@ -134,7 +137,9 @@ def execute(args: Namespace) -> None: block.nonce, block.weight)) try: - block.verify_without_storage() + settings = get_settings() + verifier = BlockVerifier(settings=settings) + verifier.verify_without_storage(block) except HathorError: print('[{}] ERROR: Block has not been pushed because it is not valid.'.format(datetime.datetime.now())) else: diff --git a/hathor/simulator/simulator.py b/hathor/simulator/simulator.py index cb655bbac..ac1d88ade 100644 --- a/hathor/simulator/simulator.py +++ b/hathor/simulator/simulator.py @@ -28,7 +28,14 @@ from hathor.simulator.clock import HeapClock, 
MemoryReactorHeapClock from hathor.simulator.miner.geometric_miner import GeometricMiner from hathor.simulator.tx_generator import RandomTransactionGenerator +from hathor.simulator.verification import ( + SimulatorBlockVerifier, + SimulatorMergeMinedBlockVerifier, + SimulatorTokenCreationTransactionVerifier, + SimulatorTransactionVerifier, +) from hathor.util import Random +from hathor.verification.verification_service import VertexVerifiers from hathor.wallet import HDWallet if TYPE_CHECKING: @@ -52,25 +59,17 @@ def _apply_patches(cls): Patches: - - disable pow verification - disable Transaction.resolve method - set DAA test-mode to DISABLED (will actually run the pow function, that won't actually verify the pow) - override AVG_TIME_BETWEEN_BLOCKS to 64 """ from hathor.transaction import BaseTransaction - def verify_pow(self: BaseTransaction, *args: Any, **kwargs: Any) -> None: - assert self.hash is not None - logger.new().debug('Skipping BaseTransaction.verify_pow() for simulator') - def resolve(self: BaseTransaction, update_time: bool = True) -> bool: self.update_hash() logger.new().debug('Skipping BaseTransaction.resolve() for simulator') return True - cls._original_verify_pow = BaseTransaction.verify_pow - BaseTransaction.verify_pow = verify_pow - cls._original_resolve = BaseTransaction.resolve BaseTransaction.resolve = resolve @@ -85,7 +84,6 @@ def _remove_patches(cls): """ Remove the patches previously applied. 
""" from hathor.transaction import BaseTransaction - BaseTransaction.verify_pow = cls._original_verify_pow BaseTransaction.resolve = cls._original_resolve from hathor import daa @@ -170,10 +168,18 @@ def create_artifacts(self, builder: Optional[Builder] = None) -> BuildArtifacts: wallet = HDWallet(gap_limit=2) wallet._manually_initialize() + vertex_verifiers = VertexVerifiers( + block=SimulatorBlockVerifier(settings=self.settings), + merge_mined_block=SimulatorMergeMinedBlockVerifier(settings=self.settings), + tx=SimulatorTransactionVerifier(settings=self.settings), + token_creation_tx=SimulatorTokenCreationTransactionVerifier(settings=self.settings), + ) + artifacts = builder \ .set_reactor(self._clock) \ .set_rng(Random(self.rng.getrandbits(64))) \ .set_wallet(wallet) \ + .set_vertex_verifiers(vertex_verifiers) \ .build() artifacts.manager.start() diff --git a/hathor/simulator/verification.py b/hathor/simulator/verification.py new file mode 100644 index 000000000..849a9a0e9 --- /dev/null +++ b/hathor/simulator/verification.py @@ -0,0 +1,54 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from typing import Optional + +from structlog import get_logger + +from hathor.transaction import BaseTransaction +from hathor.verification.block_verifier import BlockVerifier +from hathor.verification.merge_mined_block_verifier import MergeMinedBlockVerifier +from hathor.verification.token_creation_transaction_verifier import TokenCreationTransactionVerifier +from hathor.verification.transaction_verifier import TransactionVerifier + +logger = get_logger() + + +def verify_pow(vertex: BaseTransaction) -> None: + assert vertex.hash is not None + logger.new().debug('Skipping BaseTransaction.verify_pow() for simulator') + + +class SimulatorBlockVerifier(BlockVerifier): + @classmethod + def verify_pow(cls, vertex: BaseTransaction, *, override_weight: Optional[float] = None) -> None: + verify_pow(vertex) + + +class SimulatorMergeMinedBlockVerifier(MergeMinedBlockVerifier): + @classmethod + def verify_pow(cls, vertex: BaseTransaction, *, override_weight: Optional[float] = None) -> None: + verify_pow(vertex) + + +class SimulatorTransactionVerifier(TransactionVerifier): + @classmethod + def verify_pow(cls, vertex: BaseTransaction, *, override_weight: Optional[float] = None) -> None: + verify_pow(vertex) + + +class SimulatorTokenCreationTransactionVerifier(TokenCreationTransactionVerifier): + @classmethod + def verify_pow(cls, vertex: BaseTransaction, *, override_weight: Optional[float] = None) -> None: + verify_pow(vertex) diff --git a/hathor/stratum/stratum.py b/hathor/stratum/stratum.py index 6abc2dfbd..16b278661 100644 --- a/hathor/stratum/stratum.py +++ b/hathor/stratum/stratum.py @@ -41,6 +41,7 @@ from hathor.transaction import BaseTransaction, BitcoinAuxPow, Block, MergeMinedBlock, Transaction, sum_weights from hathor.transaction.exceptions import PowError, ScriptError, TxValidationError from hathor.util import Reactor, json_dumpb, json_loadb, reactor +from hathor.verification.vertex_verifier import VertexVerifier from hathor.wallet.exceptions import InvalidAddress 
if TYPE_CHECKING: @@ -526,7 +527,7 @@ def handle_submit(self, params: dict, msgid: Optional[str]) -> None: self.log.debug('share received', block=tx, block_base=block_base.hex(), block_base_hash=block_base_hash.hex()) try: - tx.verify_pow(job.weight) + VertexVerifier.verify_pow(tx, override_weight=job.weight) except PowError: self.log.error('bad share, discard', job_weight=job.weight, tx=tx) return self.send_error(INVALID_SOLUTION, msgid, { @@ -542,7 +543,7 @@ def handle_submit(self, params: dict, msgid: Optional[str]) -> None: self.manager.reactor.callLater(0, self.job_request) try: - tx.verify_pow() + VertexVerifier.verify_pow(tx) except PowError: # Transaction pow was not enough, but the share was succesfully submited self.log.info('high hash, keep mining', tx=tx) diff --git a/hathor/transaction/base_transaction.py b/hathor/transaction/base_transaction.py index ea185893e..02de172a7 100644 --- a/hathor/transaction/base_transaction.py +++ b/hathor/transaction/base_transaction.py @@ -28,19 +28,7 @@ from hathor.checkpoint import Checkpoint from hathor.conf.get_settings import get_settings -from hathor.transaction.exceptions import ( - DuplicatedParents, - IncorrectParents, - InvalidOutputScriptSize, - InvalidOutputValue, - InvalidToken, - ParentDoesNotExist, - PowError, - TimestampError, - TooManyOutputs, - TooManySigOps, - WeightError, -) +from hathor.transaction.exceptions import InvalidOutputValue, WeightError from hathor.transaction.transaction_metadata import TransactionMetadata from hathor.transaction.util import VerboseCallback, int_to_bytes, unpack, unpack_len from hathor.transaction.validation_state import ValidationState @@ -70,14 +58,6 @@ # Weight (d), timestamp (I), and parents len (B) _GRAPH_FORMAT_STRING = '!dIB' -# tx should have 2 parents, both other transactions -_TX_PARENTS_TXS = 2 -_TX_PARENTS_BLOCKS = 0 - -# blocks have 3 parents, 2 txs and 1 block -_BLOCK_PARENTS_TXS = 2 -_BLOCK_PARENTS_BLOCKS = 1 - # The int value of one byte _ONE_BYTE = 0xFF 
@@ -540,137 +520,6 @@ def verify_checkpoint(self, checkpoints: list[Checkpoint]) -> None: To be implemented by tx/block, used by `self.validate_checkpoint`. Should not modify the validation state.""" raise NotImplementedError - def verify_parents(self) -> None: - """All parents must exist and their timestamps must be smaller than ours. - - Also, txs should have 2 other txs as parents, while blocks should have 2 txs + 1 block. - - Parents must be ordered with blocks first, followed by transactions. - - :raises TimestampError: when our timestamp is less or equal than our parent's timestamp - :raises ParentDoesNotExist: when at least one of our parents does not exist - :raises IncorrectParents: when tx does not confirm the correct number/type of parent txs - """ - from hathor.transaction.storage.exceptions import TransactionDoesNotExist - - assert self.storage is not None - - # check if parents are duplicated - parents_set = set(self.parents) - if len(self.parents) > len(parents_set): - raise DuplicatedParents('Tx has duplicated parents: {}', [tx_hash.hex() for tx_hash in self.parents]) - - my_parents_txs = 0 # number of tx parents - my_parents_blocks = 0 # number of block parents - min_timestamp: Optional[int] = None - - for parent_hash in self.parents: - try: - parent = self.storage.get_transaction(parent_hash) - assert parent.hash is not None - if self.timestamp <= parent.timestamp: - raise TimestampError('tx={} timestamp={}, parent={} timestamp={}'.format( - self.hash_hex, - self.timestamp, - parent.hash_hex, - parent.timestamp, - )) - - if parent.is_block: - if self.is_block and not parent.is_genesis: - if self.timestamp - parent.timestamp > self._settings.MAX_DISTANCE_BETWEEN_BLOCKS: - raise TimestampError('Distance between blocks is too big' - ' ({} seconds)'.format(self.timestamp - parent.timestamp)) - if my_parents_txs > 0: - raise IncorrectParents('Parents which are blocks must come before transactions') - for pi_hash in parent.parents: - pi = 
self.storage.get_transaction(parent_hash) - if not pi.is_block: - min_timestamp = ( - min(min_timestamp, pi.timestamp) if min_timestamp is not None - else pi.timestamp - ) - my_parents_blocks += 1 - else: - if min_timestamp and parent.timestamp < min_timestamp: - raise TimestampError('tx={} timestamp={}, parent={} timestamp={}, min_timestamp={}'.format( - self.hash_hex, - self.timestamp, - parent.hash_hex, - parent.timestamp, - min_timestamp - )) - my_parents_txs += 1 - except TransactionDoesNotExist: - raise ParentDoesNotExist('tx={} parent={}'.format(self.hash_hex, parent_hash.hex())) - - # check for correct number of parents - if self.is_block: - parents_txs = _BLOCK_PARENTS_TXS - parents_blocks = _BLOCK_PARENTS_BLOCKS - else: - parents_txs = _TX_PARENTS_TXS - parents_blocks = _TX_PARENTS_BLOCKS - if my_parents_blocks != parents_blocks: - raise IncorrectParents('wrong number of parents (block type): {}, expecting {}'.format( - my_parents_blocks, parents_blocks)) - if my_parents_txs != parents_txs: - raise IncorrectParents('wrong number of parents (tx type): {}, expecting {}'.format( - my_parents_txs, parents_txs)) - - def verify_pow(self, override_weight: Optional[float] = None) -> None: - """Verify proof-of-work - - :raises PowError: when the hash is equal or greater than the target - """ - assert self.hash is not None - numeric_hash = int(self.hash_hex, self.HEX_BASE) - minimum_target = self.get_target(override_weight) - if numeric_hash >= minimum_target: - raise PowError(f'Transaction has invalid data ({numeric_hash} < {minimum_target})') - - def verify_number_of_outputs(self) -> None: - """Verify number of outputs does not exceeds the limit""" - if len(self.outputs) > self._settings.MAX_NUM_OUTPUTS: - raise TooManyOutputs('Maximum number of outputs exceeded') - - def verify_sigops_output(self) -> None: - """ Count sig operations on all outputs and verify that the total sum is below the limit - """ - from hathor.transaction.scripts import get_sigops_count - 
n_txops = 0 - - for tx_output in self.outputs: - n_txops += get_sigops_count(tx_output.script) - - if n_txops > self._settings.MAX_TX_SIGOPS_OUTPUT: - raise TooManySigOps('TX[{}]: Maximum number of sigops for all outputs exceeded ({})'.format( - self.hash_hex, n_txops)) - - def verify_outputs(self) -> None: - """Verify there are no hathor authority UTXOs and outputs are all positive - - :raises InvalidToken: when there's a hathor authority utxo - :raises InvalidOutputValue: output has negative value - :raises TooManyOutputs: when there are too many outputs - """ - self.verify_number_of_outputs() - for index, output in enumerate(self.outputs): - # no hathor authority UTXO - if (output.get_token_index() == 0) and output.is_token_authority(): - raise InvalidToken('Cannot have authority UTXO for hathor tokens: {}'.format( - output.to_human_readable())) - - # output value must be positive - if output.value <= 0: - raise InvalidOutputValue('Output value must be a positive integer. Value: {} and index: {}'.format( - output.value, index)) - - if len(output.script) > self._settings.MAX_OUTPUT_SCRIPT_SIZE: - raise InvalidOutputScriptSize('size: {} and max-size: {}'.format( - len(output.script), self._settings.MAX_OUTPUT_SCRIPT_SIZE - )) - def resolve(self, update_time: bool = False) -> bool: """Run a CPU mining looking for the nonce that solves the proof-of-work diff --git a/hathor/transaction/block.py b/hathor/transaction/block.py index bef6f3368..b416d9fd3 100644 --- a/hathor/transaction/block.py +++ b/hathor/transaction/block.py @@ -18,21 +18,12 @@ from struct import pack from typing import TYPE_CHECKING, Any, Optional -from hathor import daa from hathor.checkpoint import Checkpoint from hathor.feature_activation.feature import Feature from hathor.feature_activation.model.feature_state import FeatureState from hathor.profiler import get_cpu_profiler from hathor.transaction import BaseTransaction, TxOutput, TxVersion -from hathor.transaction.exceptions import ( - 
BlockWithInputs, - BlockWithTokensError, - CheckpointError, - InvalidBlockReward, - RewardLocked, - TransactionDataError, - WeightError, -) +from hathor.transaction.exceptions import CheckpointError from hathor.transaction.util import VerboseCallback, int_to_bytes, unpack, unpack_len from hathor.util import not_none from hathor.utils.int import get_bit_list @@ -337,55 +328,6 @@ def verify_checkpoint(self, checkpoints: list[Checkpoint]) -> None: # TODO: check whether self is a parent of any checkpoint-valid block, this is left for a future PR pass - def verify_weight(self) -> None: - """Validate minimum block difficulty.""" - block_weight = daa.calculate_block_difficulty(self) - if self.weight < block_weight - self._settings.WEIGHT_TOL: - raise WeightError(f'Invalid new block {self.hash_hex}: weight ({self.weight}) is ' - f'smaller than the minimum weight ({block_weight})') - - def verify_height(self) -> None: - """Validate that the block height is enough to confirm all transactions being confirmed.""" - meta = self.get_metadata() - assert meta.height is not None - assert meta.min_height is not None - if meta.height < meta.min_height: - raise RewardLocked(f'Block needs {meta.min_height} height but has {meta.height}') - - def verify_reward(self) -> None: - """Validate reward amount.""" - parent_block = self.get_block_parent() - tokens_issued_per_block = daa.get_tokens_issued_per_block(parent_block.get_height() + 1) - if self.sum_outputs != tokens_issued_per_block: - raise InvalidBlockReward( - f'Invalid number of issued tokens tag=invalid_issued_tokens tx.hash={self.hash_hex} ' - f'issued={self.sum_outputs} allowed={tokens_issued_per_block}' - ) - - def verify_no_inputs(self) -> None: - inputs = getattr(self, 'inputs', None) - if inputs: - raise BlockWithInputs('number of inputs {}'.format(len(inputs))) - - def verify_outputs(self) -> None: - super().verify_outputs() - for output in self.outputs: - if output.get_token_index() > 0: - raise BlockWithTokensError('in 
output: {}'.format(output.to_human_readable())) - - def verify_data(self) -> None: - if len(self.data) > self._settings.BLOCK_DATA_MAX_SIZE: - raise TransactionDataError('block data has {} bytes'.format(len(self.data))) - - def verify_without_storage(self) -> None: - """ Run all verifications that do not need a storage. - """ - self.verify_pow() - self.verify_no_inputs() - self.verify_outputs() - self.verify_data() - self.verify_sigops_output() - def get_base_hash(self) -> bytes: from hathor.merged_mining.bitcoin import sha256d_hash return sha256d_hash(self.get_header_without_nonce()) diff --git a/hathor/transaction/merge_mined_block.py b/hathor/transaction/merge_mined_block.py index 121011a23..a6818ecde 100644 --- a/hathor/transaction/merge_mined_block.py +++ b/hathor/transaction/merge_mined_block.py @@ -74,13 +74,3 @@ def to_json(self, decode_script: bool = False, include_metadata: bool = False) - del json['nonce'] json['aux_pow'] = bytes(self.aux_pow).hex() if self.aux_pow else None return json - - def verify_without_storage(self) -> None: - self.verify_aux_pow() - super().verify_without_storage() - - def verify_aux_pow(self) -> None: - """ Verify auxiliary proof-of-work (for merged mining). 
- """ - assert self.aux_pow is not None - self.aux_pow.verify(self.get_base_hash()) diff --git a/hathor/transaction/resources/create_tx.py b/hathor/transaction/resources/create_tx.py index 438d1f23d..dcec5d363 100644 --- a/hathor/transaction/resources/create_tx.py +++ b/hathor/transaction/resources/create_tx.py @@ -89,7 +89,7 @@ def render_POST(self, request): # conservative estimate of the input data size to estimate a valid weight tx_input.data = b'\0' * 107 tx.weight = minimum_tx_weight(fake_signed_tx) - tx.verify_unsigned_skip_pow() + self.manager.verification_service.verifiers.tx.verify_unsigned_skip_pow(tx) if tx.is_double_spending(): raise InvalidNewTransaction('At least one of your inputs has already been spent.') diff --git a/hathor/transaction/token_creation_tx.py b/hathor/transaction/token_creation_tx.py index c2e63f9f2..5bcc672a8 100644 --- a/hathor/transaction/token_creation_tx.py +++ b/hathor/transaction/token_creation_tx.py @@ -16,10 +16,9 @@ from typing import Any, Optional from hathor.transaction.base_transaction import TxInput, TxOutput, TxVersion -from hathor.transaction.exceptions import InvalidToken, TransactionDataError from hathor.transaction.storage import TransactionStorage # noqa: F401 -from hathor.transaction.transaction import TokenInfo, Transaction -from hathor.transaction.util import VerboseCallback, clean_token_string, int_to_bytes, unpack, unpack_len +from hathor.transaction.transaction import Transaction +from hathor.transaction.util import VerboseCallback, int_to_bytes, unpack, unpack_len # Signal bits (B), version (B), inputs len (B), outputs len (B) _FUNDS_FORMAT_STRING = '!BBBB' @@ -220,45 +219,6 @@ def to_json_extended(self) -> dict[str, Any]: json['tokens'] = [] return json - def verify_sum(self) -> None: - """ Besides all checks made on regular transactions, a few extra ones are made: - - only HTR tokens on the inputs; - - new tokens are actually being minted; - - :raises InvalidToken: when there's an error in token 
operations - :raises InputOutputMismatch: if sum of inputs is not equal to outputs and there's no mint/melt - """ - token_dict = self.get_token_info_from_inputs() - - # we add the created token's info to token_dict, as the creation tx allows for mint/melt - assert self.hash is not None - token_dict[self.hash] = TokenInfo(0, True, True) - - self.update_token_info_from_outputs(token_dict) - - # make sure tokens are being minted - token_info = token_dict[self.hash] - if token_info.amount <= 0: - raise InvalidToken('Token creation transaction must mint new tokens') - - self.check_authorities_and_deposit(token_dict) - - def verify_token_info(self) -> None: - """ Validates token info - """ - name_len = len(self.token_name) - symbol_len = len(self.token_symbol) - if name_len == 0 or name_len > self._settings.MAX_LENGTH_TOKEN_NAME: - raise TransactionDataError('Invalid token name length ({})'.format(name_len)) - if symbol_len == 0 or symbol_len > self._settings.MAX_LENGTH_TOKEN_SYMBOL: - raise TransactionDataError('Invalid token symbol length ({})'.format(symbol_len)) - - # Can't create token with hathor name or symbol - if clean_token_string(self.token_name) == clean_token_string(self._settings.HATHOR_TOKEN_NAME): - raise TransactionDataError('Invalid token name ({})'.format(self.token_name)) - if clean_token_string(self.token_symbol) == clean_token_string(self._settings.HATHOR_TOKEN_SYMBOL): - raise TransactionDataError('Invalid token symbol ({})'.format(self.token_symbol)) - def decode_string_utf8(encoded: bytes, key: str) -> str: """ Raises StructError in case it's not a valid utf-8 string diff --git a/hathor/transaction/transaction.py b/hathor/transaction/transaction.py index 626010da2..9ca2c20d6 100644 --- a/hathor/transaction/transaction.py +++ b/hathor/transaction/transaction.py @@ -17,30 +17,12 @@ from struct import pack from typing import TYPE_CHECKING, Any, Iterator, NamedTuple, Optional -from hathor import daa from hathor.checkpoint import Checkpoint from 
hathor.exception import InvalidNewTransaction from hathor.profiler import get_cpu_profiler from hathor.transaction import BaseTransaction, Block, TxInput, TxOutput, TxVersion from hathor.transaction.base_transaction import TX_HASH_SIZE -from hathor.transaction.exceptions import ( - ConflictingInputs, - DuplicatedParents, - IncorrectParents, - InexistentInput, - InputOutputMismatch, - InvalidInputData, - InvalidInputDataSize, - InvalidToken, - NoInputError, - RewardLocked, - ScriptError, - TimestampError, - TooManyInputs, - TooManySigOps, - WeightError, -) -from hathor.transaction.util import VerboseCallback, get_deposit_amount, get_withdraw_amount, unpack, unpack_len +from hathor.transaction.util import VerboseCallback, unpack, unpack_len from hathor.types import TokenUid, VertexId from hathor.util import not_none @@ -296,89 +278,6 @@ def verify_checkpoint(self, checkpoints: list[Checkpoint]) -> None: raise InvalidNewTransaction(f'Invalid new transaction {self.hash_hex}: expected to reach a checkpoint but ' 'none of its children is checkpoint-valid') - def verify_parents_basic(self) -> None: - """Verify number and non-duplicity of parents.""" - assert self.storage is not None - - # check if parents are duplicated - parents_set = set(self.parents) - if len(self.parents) > len(parents_set): - raise DuplicatedParents('Tx has duplicated parents: {}', [tx_hash.hex() for tx_hash in self.parents]) - - if len(self.parents) != 2: - raise IncorrectParents(f'wrong number of parents (tx type): {len(self.parents)}, expecting 2') - - def verify_weight(self) -> None: - """Validate minimum tx difficulty.""" - min_tx_weight = daa.minimum_tx_weight(self) - max_tx_weight = min_tx_weight + self._settings.MAX_TX_WEIGHT_DIFF - if self.weight < min_tx_weight - self._settings.WEIGHT_TOL: - raise WeightError(f'Invalid new tx {self.hash_hex}: weight ({self.weight}) is ' - f'smaller than the minimum weight ({min_tx_weight})') - elif min_tx_weight > 
self._settings.MAX_TX_WEIGHT_DIFF_ACTIVATION and self.weight > max_tx_weight: - raise WeightError(f'Invalid new tx {self.hash_hex}: weight ({self.weight}) is ' - f'greater than the maximum allowed ({max_tx_weight})') - - def verify_unsigned_skip_pow(self) -> None: - """ Same as .verify but skipping pow and signature verification.""" - self.verify_number_of_inputs() - self.verify_number_of_outputs() - self.verify_outputs() - self.verify_sigops_output() - self.verify_sigops_input() - self.verify_inputs(skip_script=True) # need to run verify_inputs first to check if all inputs exist - self.verify_parents() - self.verify_sum() - - def verify_without_storage(self) -> None: - """ Run all verifications that do not need a storage. - """ - self.verify_pow() - self.verify_number_of_inputs() - self.verify_outputs() - self.verify_sigops_output() - - def verify_number_of_inputs(self) -> None: - """Verify number of inputs is in a valid range""" - if len(self.inputs) > self._settings.MAX_NUM_INPUTS: - raise TooManyInputs('Maximum number of inputs exceeded') - - if len(self.inputs) == 0: - if not self.is_genesis: - raise NoInputError('Transaction must have at least one input') - - def verify_sigops_input(self) -> None: - """ Count sig operations on all inputs and verify that the total sum is below the limit - """ - from hathor.transaction.scripts import get_sigops_count - from hathor.transaction.storage.exceptions import TransactionDoesNotExist - n_txops = 0 - for tx_input in self.inputs: - try: - spent_tx = self.get_spent_tx(tx_input) - except TransactionDoesNotExist: - raise InexistentInput('Input tx does not exist: {}'.format(tx_input.tx_id.hex())) - assert spent_tx.hash is not None - if tx_input.index >= len(spent_tx.outputs): - raise InexistentInput('Output spent by this input does not exist: {} index {}'.format( - tx_input.tx_id.hex(), tx_input.index)) - n_txops += get_sigops_count(tx_input.data, spent_tx.outputs[tx_input.index].script) - - if n_txops > 
self._settings.MAX_TX_SIGOPS_INPUT: - raise TooManySigOps( - 'TX[{}]: Max number of sigops for inputs exceeded ({})'.format(self.hash_hex, n_txops)) - - def verify_outputs(self) -> None: - """Verify outputs reference an existing token uid in the tokens list - - :raises InvalidToken: output references non existent token uid - """ - super().verify_outputs() - for output in self.outputs: - # check index is valid - if output.get_token_index() > len(self.tokens): - raise InvalidToken('token uid index not available: index {}'.format(output.get_token_index())) - def get_token_info_from_inputs(self) -> dict[TokenUid, TokenInfo]: """Sum up all tokens present in the inputs and their properties (amount, can_mint, can_melt) """ @@ -406,92 +305,6 @@ def get_token_info_from_inputs(self) -> dict[TokenUid, TokenInfo]: return token_dict - def update_token_info_from_outputs(self, token_dict: dict[TokenUid, TokenInfo]) -> None: - """Iterate over the outputs and add values to token info dict. Updates the dict in-place. 
- - Also, checks if no token has authorities on the outputs not present on the inputs - - :raises InvalidToken: when there's an error in token operations - """ - # iterate over outputs and add values to token_dict - for index, tx_output in enumerate(self.outputs): - token_uid = self.get_token_uid(tx_output.get_token_index()) - token_info = token_dict.get(token_uid) - if token_info is None: - raise InvalidToken('no inputs for token {}'.format(token_uid.hex())) - else: - # for authority outputs, make sure the same capability (mint/melt) was present in the inputs - if tx_output.can_mint_token() and not token_info.can_mint: - raise InvalidToken('output has mint authority, but no input has it: {}'.format( - tx_output.to_human_readable())) - if tx_output.can_melt_token() and not token_info.can_melt: - raise InvalidToken('output has melt authority, but no input has it: {}'.format( - tx_output.to_human_readable())) - - if tx_output.is_token_authority(): - # make sure we only have authorities that we know of - if tx_output.value > TxOutput.ALL_AUTHORITIES: - raise InvalidToken('Invalid authorities in output (0b{0:b})'.format(tx_output.value)) - else: - # for regular outputs, just subtract from the total amount - sum_tokens = token_info.amount + tx_output.value - token_dict[token_uid] = TokenInfo(sum_tokens, token_info.can_mint, token_info.can_melt) - - def check_authorities_and_deposit(self, token_dict: dict[TokenUid, TokenInfo]) -> None: - """Verify that the sum of outputs is equal of the sum of inputs, for each token. If sum of inputs - and outputs is not 0, make sure inputs have mint/melt authority. 
- - token_dict sums up all tokens present in the tx and their properties (amount, can_mint, can_melt) - amount = outputs - inputs, thus: - - amount < 0 when melting - - amount > 0 when minting - - :raises InputOutputMismatch: if sum of inputs is not equal to outputs and there's no mint/melt - """ - withdraw = 0 - deposit = 0 - for token_uid, token_info in token_dict.items(): - if token_uid == self._settings.HATHOR_TOKEN_UID: - continue - - if token_info.amount == 0: - # that's the usual behavior, nothing to do - pass - elif token_info.amount < 0: - # tokens have been melted - if not token_info.can_melt: - raise InputOutputMismatch('{} {} tokens melted, but there is no melt authority input'.format( - token_info.amount, token_uid.hex())) - withdraw += get_withdraw_amount(token_info.amount) - else: - # tokens have been minted - if not token_info.can_mint: - raise InputOutputMismatch('{} {} tokens minted, but there is no mint authority input'.format( - (-1) * token_info.amount, token_uid.hex())) - deposit += get_deposit_amount(token_info.amount) - - # check whether the deposit/withdraw amount is correct - htr_expected_amount = withdraw - deposit - htr_info = token_dict[self._settings.HATHOR_TOKEN_UID] - if htr_info.amount != htr_expected_amount: - raise InputOutputMismatch('HTR balance is different than expected. (amount={}, expected={})'.format( - htr_info.amount, - htr_expected_amount, - )) - - def verify_sum(self) -> None: - """Verify that the sum of outputs is equal of the sum of inputs, for each token. - - If there are authority UTXOs involved, tokens can be minted or melted, so the above rule may - not be respected. 
- - :raises InvalidToken: when there's an error in token operations - :raises InputOutputMismatch: if sum of inputs is not equal to outputs and there's no mint/melt - """ - token_dict = self.get_token_info_from_inputs() - self.update_token_info_from_outputs(token_dict) - self.check_authorities_and_deposit(token_dict) - def iter_spent_rewards(self) -> Iterator[Block]: """Iterate over all the rewards being spent, assumes tx has been verified.""" for input_tx in self.inputs: @@ -500,51 +313,6 @@ def iter_spent_rewards(self) -> Iterator[Block]: assert isinstance(spent_tx, Block) yield spent_tx - def verify_inputs(self, *, skip_script: bool = False) -> None: - """Verify inputs signatures and ownership and all inputs actually exist""" - from hathor.transaction.storage.exceptions import TransactionDoesNotExist - - spent_outputs: set[tuple[VertexId, int]] = set() - for input_tx in self.inputs: - if len(input_tx.data) > self._settings.MAX_INPUT_DATA_SIZE: - raise InvalidInputDataSize('size: {} and max-size: {}'.format( - len(input_tx.data), self._settings.MAX_INPUT_DATA_SIZE - )) - - try: - spent_tx = self.get_spent_tx(input_tx) - assert spent_tx.hash is not None - if input_tx.index >= len(spent_tx.outputs): - raise InexistentInput('Output spent by this input does not exist: {} index {}'.format( - input_tx.tx_id.hex(), input_tx.index)) - except TransactionDoesNotExist: - raise InexistentInput('Input tx does not exist: {}'.format(input_tx.tx_id.hex())) - - if self.timestamp <= spent_tx.timestamp: - raise TimestampError('tx={} timestamp={}, spent_tx={} timestamp={}'.format( - self.hash.hex() if self.hash else None, - self.timestamp, - spent_tx.hash.hex(), - spent_tx.timestamp, - )) - - if not skip_script: - self.verify_script(input_tx, spent_tx) - - # check if any other input in this tx is spending the same output - key = (input_tx.tx_id, input_tx.index) - if key in spent_outputs: - raise ConflictingInputs('tx {} inputs spend the same output: {} index {}'.format( - 
self.hash_hex, input_tx.tx_id.hex(), input_tx.index)) - spent_outputs.add(key) - - def verify_reward_locked(self) -> None: - """Will raise `RewardLocked` if any reward is spent before the best block height is enough, considering only - the block rewards spent by this tx itself, and not the inherited `min_height`.""" - info = self.get_spent_reward_locked_info() - if info is not None: - raise RewardLocked(f'Reward {info.block_hash.hex()} still needs {info.blocks_needed} to be unlocked.') - def is_spent_reward_locked(self) -> bool: """ Check whether any spent reward is currently locked, considering only the block rewards spent by this tx itself, and not the inherited `min_height`""" @@ -578,17 +346,6 @@ def _spent_reward_needed_height(self, block: Block) -> int: needed_height = self._settings.REWARD_SPEND_MIN_BLOCKS - spend_blocks return max(needed_height, 0) - def verify_script(self, input_tx: TxInput, spent_tx: BaseTransaction) -> None: - """ - :type input_tx: TxInput - :type spent_tx: Transaction - """ - from hathor.transaction.scripts import script_eval - try: - script_eval(self, input_tx, spent_tx) - except ScriptError as e: - raise InvalidInputData(e) from e - def is_double_spending(self) -> bool: """ Iterate through inputs to check if they were already spent Used to prevent users from sending double spending transactions to the network diff --git a/hathor/verification/block_verification.py b/hathor/verification/block_verification.py deleted file mode 100644 index 3e47aa254..000000000 --- a/hathor/verification/block_verification.py +++ /dev/null @@ -1,47 +0,0 @@ -# Copyright 2023 Hathor Labs -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from hathor.profiler import get_cpu_profiler -from hathor.transaction import Block - -cpu = get_cpu_profiler() - - -def verify_basic(block: Block, *, skip_block_weight_verification: bool = False) -> None: - """Partially run validations, the ones that need parents/inputs are skipped.""" - if not skip_block_weight_verification: - block.verify_weight() - block.verify_reward() - - -@cpu.profiler(key=lambda block: 'block-verify!{}'.format(block.hash.hex())) -def verify(block: Block) -> None: - """ - (1) confirms at least two pending transactions and references last block - (2) solves the pow with the correct weight (done in HathorManager) - (3) creates the correct amount of tokens in the output (done in HathorManager) - (4) all parents must exist and have timestamp smaller than ours - (5) data field must contain at most BLOCK_DATA_MAX_SIZE bytes - """ - # TODO Should we validate a limit of outputs? - if block.is_genesis: - # TODO do genesis validation - return - - block.verify_without_storage() - - # (1) and (4) - block.verify_parents() - - block.verify_height() diff --git a/hathor/verification/block_verifier.py b/hathor/verification/block_verifier.py new file mode 100644 index 000000000..95507f90c --- /dev/null +++ b/hathor/verification/block_verifier.py @@ -0,0 +1,112 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from hathor import daa +from hathor.profiler import get_cpu_profiler +from hathor.transaction import BaseTransaction, Block +from hathor.transaction.exceptions import ( + BlockWithInputs, + BlockWithTokensError, + InvalidBlockReward, + RewardLocked, + TransactionDataError, + WeightError, +) +from hathor.verification.vertex_verifier import VertexVerifier + +cpu = get_cpu_profiler() + + +class BlockVerifier(VertexVerifier): + __slots__ = () + + def verify_basic(self, block: Block, *, skip_block_weight_verification: bool = False) -> None: + """Partially run validations, the ones that need parents/inputs are skipped.""" + if not skip_block_weight_verification: + self.verify_weight(block) + self.verify_reward(block) + + @cpu.profiler(key=lambda _, block: 'block-verify!{}'.format(block.hash.hex())) + def verify(self, block: Block) -> None: + """ + (1) confirms at least two pending transactions and references last block + (2) solves the pow with the correct weight (done in HathorManager) + (3) creates the correct amount of tokens in the output (done in HathorManager) + (4) all parents must exist and have timestamp smaller than ours + (5) data field must contain at most BLOCK_DATA_MAX_SIZE bytes + """ + # TODO Should we validate a limit of outputs? + if block.is_genesis: + # TODO do genesis validation + return + + self.verify_without_storage(block) + + # (1) and (4) + self.verify_parents(block) + + self.verify_height(block) + + def verify_without_storage(self, block: Block) -> None: + """ Run all verifications that do not need a storage. 
+ """ + self.verify_pow(block) + self.verify_no_inputs(block) + self.verify_outputs(block) + self.verify_data(block) + self.verify_sigops_output(block) + + @staticmethod + def verify_height(block: Block) -> None: + """Validate that the block height is enough to confirm all transactions being confirmed.""" + meta = block.get_metadata() + assert meta.height is not None + assert meta.min_height is not None + if meta.height < meta.min_height: + raise RewardLocked(f'Block needs {meta.min_height} height but has {meta.height}') + + def verify_weight(self, block: Block) -> None: + """Validate minimum block difficulty.""" + block_weight = daa.calculate_block_difficulty(block) + if block.weight < block_weight - self._settings.WEIGHT_TOL: + raise WeightError(f'Invalid new block {block.hash_hex}: weight ({block.weight}) is ' + f'smaller than the minimum weight ({block_weight})') + + @staticmethod + def verify_reward(block: Block) -> None: + """Validate reward amount.""" + parent_block = block.get_block_parent() + tokens_issued_per_block = daa.get_tokens_issued_per_block(parent_block.get_height() + 1) + if block.sum_outputs != tokens_issued_per_block: + raise InvalidBlockReward( + f'Invalid number of issued tokens tag=invalid_issued_tokens tx.hash={block.hash_hex} ' + f'issued={block.sum_outputs} allowed={tokens_issued_per_block}' + ) + + @staticmethod + def verify_no_inputs(block: Block) -> None: + inputs = getattr(block, 'inputs', None) + if inputs: + raise BlockWithInputs('number of inputs {}'.format(len(inputs))) + + def verify_outputs(self, block: BaseTransaction) -> None: + assert isinstance(block, Block) + super().verify_outputs(block) + for output in block.outputs: + if output.get_token_index() > 0: + raise BlockWithTokensError('in output: {}'.format(output.to_human_readable())) + + def verify_data(self, block: Block) -> None: + if len(block.data) > self._settings.BLOCK_DATA_MAX_SIZE: + raise TransactionDataError('block data has {} bytes'.format(len(block.data))) diff 
--git a/hathor/verification/merge_mined_block_verifier.py b/hathor/verification/merge_mined_block_verifier.py new file mode 100644 index 000000000..41d34bd4a --- /dev/null +++ b/hathor/verification/merge_mined_block_verifier.py @@ -0,0 +1,32 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from hathor.transaction import Block, MergeMinedBlock +from hathor.verification.block_verifier import BlockVerifier + + +class MergeMinedBlockVerifier(BlockVerifier): + __slots__ = () + + def verify_without_storage(self, block: Block) -> None: + assert isinstance(block, MergeMinedBlock) + self.verify_aux_pow(block) + super().verify_without_storage(block) + + @staticmethod + def verify_aux_pow(block: MergeMinedBlock) -> None: + """ Verify auxiliary proof-of-work (for merged mining). + """ + assert block.aux_pow is not None + block.aux_pow.verify(block.get_base_hash()) diff --git a/hathor/verification/token_creation_transaction_verification.py b/hathor/verification/token_creation_transaction_verification.py deleted file mode 100644 index b1d9622b2..000000000 --- a/hathor/verification/token_creation_transaction_verification.py +++ /dev/null @@ -1,25 +0,0 @@ -# Copyright 2023 Hathor Labs -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from hathor.transaction.token_creation_tx import TokenCreationTransaction -from hathor.verification import transaction_verification - - -def verify(tx: TokenCreationTransaction, *, reject_locked_reward: bool = True) -> None: - """ Run all validations as regular transactions plus validation on token info. - - We also overload verify_sum to make some different checks - """ - transaction_verification.verify(tx, reject_locked_reward=reject_locked_reward) - tx.verify_token_info() diff --git a/hathor/verification/token_creation_transaction_verifier.py b/hathor/verification/token_creation_transaction_verifier.py new file mode 100644 index 000000000..cdb41ace7 --- /dev/null +++ b/hathor/verification/token_creation_transaction_verifier.py @@ -0,0 +1,71 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from hathor.transaction.exceptions import InvalidToken, TransactionDataError +from hathor.transaction.token_creation_tx import TokenCreationTransaction +from hathor.transaction.transaction import TokenInfo, Transaction +from hathor.transaction.util import clean_token_string +from hathor.verification.transaction_verifier import TransactionVerifier + + +class TokenCreationTransactionVerifier(TransactionVerifier): + __slots__ = () + + def verify(self, tx: TokenCreationTransaction, *, reject_locked_reward: bool = True) -> None: + """ Run all validations as regular transactions plus validation on token info. + + We also overload verify_sum to make some different checks + """ + super().verify(tx, reject_locked_reward=reject_locked_reward) + self.verify_token_info(tx) + + def verify_sum(self, tx: Transaction) -> None: + """ Besides all checks made on regular transactions, a few extra ones are made: + - only HTR tokens on the inputs; + - new tokens are actually being minted; + + :raises InvalidToken: when there's an error in token operations + :raises InputOutputMismatch: if sum of inputs is not equal to outputs and there's no mint/melt + """ + assert isinstance(tx, TokenCreationTransaction) + token_dict = tx.get_token_info_from_inputs() + + # we add the created token's info to token_dict, as the creation tx allows for mint/melt + assert tx.hash is not None + token_dict[tx.hash] = TokenInfo(0, True, True) + + self.update_token_info_from_outputs(tx, token_dict=token_dict) + + # make sure tokens are being minted + token_info = token_dict[tx.hash] + if token_info.amount <= 0: + raise InvalidToken('Token creation transaction must mint new tokens') + + self.verify_authorities_and_deposit(token_dict) + + def verify_token_info(self, tx: TokenCreationTransaction) -> None: + """ Validates token info + """ + name_len = len(tx.token_name) + symbol_len = len(tx.token_symbol) + if name_len == 0 or name_len > self._settings.MAX_LENGTH_TOKEN_NAME: + raise TransactionDataError('Invalid 
token name length ({})'.format(name_len)) + if symbol_len == 0 or symbol_len > self._settings.MAX_LENGTH_TOKEN_SYMBOL: + raise TransactionDataError('Invalid token symbol length ({})'.format(symbol_len)) + + # Can't create token with hathor name or symbol + if clean_token_string(tx.token_name) == clean_token_string(self._settings.HATHOR_TOKEN_NAME): + raise TransactionDataError('Invalid token name ({})'.format(tx.token_name)) + if clean_token_string(tx.token_symbol) == clean_token_string(self._settings.HATHOR_TOKEN_SYMBOL): + raise TransactionDataError('Invalid token symbol ({})'.format(tx.token_symbol)) diff --git a/hathor/verification/transaction_verification.py b/hathor/verification/transaction_verification.py deleted file mode 100644 index 02d887a10..000000000 --- a/hathor/verification/transaction_verification.py +++ /dev/null @@ -1,53 +0,0 @@ -# Copyright 2023 Hathor Labs -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from hathor.profiler import get_cpu_profiler -from hathor.transaction import Transaction - -cpu = get_cpu_profiler() - - -def verify_basic(transaction: Transaction) -> None: - """Partially run validations, the ones that need parents/inputs are skipped.""" - if transaction.is_genesis: - # TODO do genesis validation? 
- return - transaction.verify_parents_basic() - transaction.verify_weight() - transaction.verify_without_storage() - - -@cpu.profiler(key=lambda tx: 'tx-verify!{}'.format(tx.hash.hex())) -def verify(tx: Transaction, *, reject_locked_reward: bool = True) -> None: - """ Common verification for all transactions: - (i) number of inputs is at most 256 - (ii) number of outputs is at most 256 - (iii) confirms at least two pending transactions - (iv) solves the pow (we verify weight is correct in HathorManager) - (v) validates signature of inputs - (vi) validates public key and output (of the inputs) addresses - (vii) validate that both parents are valid - (viii) validate input's timestamps - (ix) validate inputs and outputs sum - """ - if tx.is_genesis: - # TODO do genesis validation - return - tx.verify_without_storage() - tx.verify_sigops_input() - tx.verify_inputs() # need to run verify_inputs first to check if all inputs exist - tx.verify_parents() - tx.verify_sum() - if reject_locked_reward: - tx.verify_reward_locked() diff --git a/hathor/verification/transaction_verifier.py b/hathor/verification/transaction_verifier.py new file mode 100644 index 000000000..9ef497e05 --- /dev/null +++ b/hathor/verification/transaction_verifier.py @@ -0,0 +1,308 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from hathor import daa +from hathor.profiler import get_cpu_profiler +from hathor.transaction import BaseTransaction, Transaction, TxInput, TxOutput +from hathor.transaction.exceptions import ( + ConflictingInputs, + DuplicatedParents, + IncorrectParents, + InexistentInput, + InputOutputMismatch, + InvalidInputData, + InvalidInputDataSize, + InvalidToken, + NoInputError, + RewardLocked, + ScriptError, + TimestampError, + TooManyInputs, + TooManySigOps, + WeightError, +) +from hathor.transaction.transaction import TokenInfo +from hathor.transaction.util import get_deposit_amount, get_withdraw_amount +from hathor.types import TokenUid, VertexId +from hathor.verification.vertex_verifier import VertexVerifier + +cpu = get_cpu_profiler() + + +class TransactionVerifier(VertexVerifier): + __slots__ = () + + def verify_basic(self, tx: Transaction) -> None: + """Partially run validations, the ones that need parents/inputs are skipped.""" + if tx.is_genesis: + # TODO do genesis validation? + return + self.verify_parents_basic(tx) + self.verify_weight(tx) + self.verify_without_storage(tx) + + @cpu.profiler(key=lambda _, tx: 'tx-verify!{}'.format(tx.hash.hex())) + def verify(self, tx: Transaction, *, reject_locked_reward: bool = True) -> None: + """ Common verification for all transactions: + (i) number of inputs is at most 256 + (ii) number of outputs is at most 256 + (iii) confirms at least two pending transactions + (iv) solves the pow (we verify weight is correct in HathorManager) + (v) validates signature of inputs + (vi) validates public key and output (of the inputs) addresses + (vii) validate that both parents are valid + (viii) validate input's timestamps + (ix) validate inputs and outputs sum + """ + if tx.is_genesis: + # TODO do genesis validation + return + self.verify_without_storage(tx) + self.verify_sigops_input(tx) + self.verify_inputs(tx) # need to run verify_inputs first to check if all inputs exist + self.verify_parents(tx) + self.verify_sum(tx) + if 
reject_locked_reward: + self.verify_reward_locked(tx) + + def verify_unsigned_skip_pow(self, tx: Transaction) -> None: + """ Same as .verify but skipping pow and signature verification.""" + self.verify_number_of_inputs(tx) + self.verify_number_of_outputs(tx) + self.verify_outputs(tx) + self.verify_sigops_output(tx) + self.verify_sigops_input(tx) + self.verify_inputs(tx, skip_script=True) # need to run verify_inputs first to check if all inputs exist + self.verify_parents(tx) + self.verify_sum(tx) + + @staticmethod + def verify_parents_basic(tx: Transaction) -> None: + """Verify number and non-duplicity of parents.""" + assert tx.storage is not None + + # check if parents are duplicated + parents_set = set(tx.parents) + if len(tx.parents) > len(parents_set): + raise DuplicatedParents('Tx has duplicated parents: {}', [tx_hash.hex() for tx_hash in tx.parents]) + + if len(tx.parents) != 2: + raise IncorrectParents(f'wrong number of parents (tx type): {len(tx.parents)}, expecting 2') + + def verify_weight(self, tx: Transaction) -> None: + """Validate minimum tx difficulty.""" + min_tx_weight = daa.minimum_tx_weight(tx) + max_tx_weight = min_tx_weight + self._settings.MAX_TX_WEIGHT_DIFF + if tx.weight < min_tx_weight - self._settings.WEIGHT_TOL: + raise WeightError(f'Invalid new tx {tx.hash_hex}: weight ({tx.weight}) is ' + f'smaller than the minimum weight ({min_tx_weight})') + elif min_tx_weight > self._settings.MAX_TX_WEIGHT_DIFF_ACTIVATION and tx.weight > max_tx_weight: + raise WeightError(f'Invalid new tx {tx.hash_hex}: weight ({tx.weight}) is ' + f'greater than the maximum allowed ({max_tx_weight})') + + def verify_without_storage(self, tx: Transaction) -> None: + """ Run all verifications that do not need a storage. 
+ """ + self.verify_pow(tx) + self.verify_number_of_inputs(tx) + self.verify_outputs(tx) + self.verify_sigops_output(tx) + + def verify_sigops_input(self, tx: Transaction) -> None: + """ Count sig operations on all inputs and verify that the total sum is below the limit + """ + from hathor.transaction.scripts import get_sigops_count + from hathor.transaction.storage.exceptions import TransactionDoesNotExist + n_txops = 0 + for tx_input in tx.inputs: + try: + spent_tx = tx.get_spent_tx(tx_input) + except TransactionDoesNotExist: + raise InexistentInput('Input tx does not exist: {}'.format(tx_input.tx_id.hex())) + assert spent_tx.hash is not None + if tx_input.index >= len(spent_tx.outputs): + raise InexistentInput('Output spent by this input does not exist: {} index {}'.format( + tx_input.tx_id.hex(), tx_input.index)) + n_txops += get_sigops_count(tx_input.data, spent_tx.outputs[tx_input.index].script) + + if n_txops > self._settings.MAX_TX_SIGOPS_INPUT: + raise TooManySigOps( + 'TX[{}]: Max number of sigops for inputs exceeded ({})'.format(tx.hash_hex, n_txops)) + + def verify_inputs(self, tx: Transaction, *, skip_script: bool = False) -> None: + """Verify inputs signatures and ownership and all inputs actually exist""" + from hathor.transaction.storage.exceptions import TransactionDoesNotExist + + spent_outputs: set[tuple[VertexId, int]] = set() + for input_tx in tx.inputs: + if len(input_tx.data) > self._settings.MAX_INPUT_DATA_SIZE: + raise InvalidInputDataSize('size: {} and max-size: {}'.format( + len(input_tx.data), self._settings.MAX_INPUT_DATA_SIZE + )) + + try: + spent_tx = tx.get_spent_tx(input_tx) + assert spent_tx.hash is not None + if input_tx.index >= len(spent_tx.outputs): + raise InexistentInput('Output spent by this input does not exist: {} index {}'.format( + input_tx.tx_id.hex(), input_tx.index)) + except TransactionDoesNotExist: + raise InexistentInput('Input tx does not exist: {}'.format(input_tx.tx_id.hex())) + + if tx.timestamp <= 
spent_tx.timestamp: + raise TimestampError('tx={} timestamp={}, spent_tx={} timestamp={}'.format( + tx.hash.hex() if tx.hash else None, + tx.timestamp, + spent_tx.hash.hex(), + spent_tx.timestamp, + )) + + if not skip_script: + self.verify_script(tx=tx, input_tx=input_tx, spent_tx=spent_tx) + + # check if any other input in this tx is spending the same output + key = (input_tx.tx_id, input_tx.index) + if key in spent_outputs: + raise ConflictingInputs('tx {} inputs spend the same output: {} index {}'.format( + tx.hash_hex, input_tx.tx_id.hex(), input_tx.index)) + spent_outputs.add(key) + + @staticmethod + def verify_script(*, tx: Transaction, input_tx: TxInput, spent_tx: BaseTransaction) -> None: + """ + :type tx: Transaction + :type input_tx: TxInput + :type spent_tx: Transaction + """ + from hathor.transaction.scripts import script_eval + try: + script_eval(tx, input_tx, spent_tx) + except ScriptError as e: + raise InvalidInputData(e) from e + + def verify_sum(self, tx: Transaction) -> None: + """Verify that the sum of outputs is equal of the sum of inputs, for each token. + + If there are authority UTXOs involved, tokens can be minted or melted, so the above rule may + not be respected. 
+ + :raises InvalidToken: when there's an error in token operations + :raises InputOutputMismatch: if sum of inputs is not equal to outputs and there's no mint/melt + """ + token_dict = tx.get_token_info_from_inputs() + self.update_token_info_from_outputs(tx, token_dict=token_dict) + self.verify_authorities_and_deposit(token_dict) + + @staticmethod + def verify_reward_locked(tx: Transaction) -> None: + """Will raise `RewardLocked` if any reward is spent before the best block height is enough, considering only + the block rewards spent by this tx itself, and not the inherited `min_height`.""" + info = tx.get_spent_reward_locked_info() + if info is not None: + raise RewardLocked(f'Reward {info.block_hash.hex()} still needs {info.blocks_needed} to be unlocked.') + + def verify_number_of_inputs(self, tx: Transaction) -> None: + """Verify number of inputs is in a valid range""" + if len(tx.inputs) > self._settings.MAX_NUM_INPUTS: + raise TooManyInputs('Maximum number of inputs exceeded') + + if len(tx.inputs) == 0: + if not tx.is_genesis: + raise NoInputError('Transaction must have at least one input') + + def verify_outputs(self, tx: BaseTransaction) -> None: + """Verify outputs reference an existing token uid in the tokens list + + :raises InvalidToken: output references non existent token uid + """ + assert isinstance(tx, Transaction) + super().verify_outputs(tx) + for output in tx.outputs: + # check index is valid + if output.get_token_index() > len(tx.tokens): + raise InvalidToken('token uid index not available: index {}'.format(output.get_token_index())) + + def verify_authorities_and_deposit(self, token_dict: dict[TokenUid, TokenInfo]) -> None: + """Verify that the sum of outputs is equal of the sum of inputs, for each token. If sum of inputs + and outputs is not 0, make sure inputs have mint/melt authority. 
+ + token_dict sums up all tokens present in the tx and their properties (amount, can_mint, can_melt) + amount = outputs - inputs, thus: + - amount < 0 when melting + - amount > 0 when minting + + :raises InputOutputMismatch: if sum of inputs is not equal to outputs and there's no mint/melt + """ + withdraw = 0 + deposit = 0 + for token_uid, token_info in token_dict.items(): + if token_uid == self._settings.HATHOR_TOKEN_UID: + continue + + if token_info.amount == 0: + # that's the usual behavior, nothing to do + pass + elif token_info.amount < 0: + # tokens have been melted + if not token_info.can_melt: + raise InputOutputMismatch('{} {} tokens melted, but there is no melt authority input'.format( + token_info.amount, token_uid.hex())) + withdraw += get_withdraw_amount(token_info.amount) + else: + # tokens have been minted + if not token_info.can_mint: + raise InputOutputMismatch('{} {} tokens minted, but there is no mint authority input'.format( + (-1) * token_info.amount, token_uid.hex())) + deposit += get_deposit_amount(token_info.amount) + + # check whether the deposit/withdraw amount is correct + htr_expected_amount = withdraw - deposit + htr_info = token_dict[self._settings.HATHOR_TOKEN_UID] + if htr_info.amount != htr_expected_amount: + raise InputOutputMismatch('HTR balance is different than expected. (amount={}, expected={})'.format( + htr_info.amount, + htr_expected_amount, + )) + + @staticmethod + def update_token_info_from_outputs(tx: Transaction, *, token_dict: dict[TokenUid, TokenInfo]) -> None: + """Iterate over the outputs and add values to token info dict. Updates the dict in-place. 
+ + Also, checks if no token has authorities on the outputs not present on the inputs + + :raises InvalidToken: when there's an error in token operations + """ + # iterate over outputs and add values to token_dict + for index, tx_output in enumerate(tx.outputs): + token_uid = tx.get_token_uid(tx_output.get_token_index()) + token_info = token_dict.get(token_uid) + if token_info is None: + raise InvalidToken('no inputs for token {}'.format(token_uid.hex())) + else: + # for authority outputs, make sure the same capability (mint/melt) was present in the inputs + if tx_output.can_mint_token() and not token_info.can_mint: + raise InvalidToken('output has mint authority, but no input has it: {}'.format( + tx_output.to_human_readable())) + if tx_output.can_melt_token() and not token_info.can_melt: + raise InvalidToken('output has melt authority, but no input has it: {}'.format( + tx_output.to_human_readable())) + + if tx_output.is_token_authority(): + # make sure we only have authorities that we know of + if tx_output.value > TxOutput.ALL_AUTHORITIES: + raise InvalidToken('Invalid authorities in output (0b{0:b})'.format(tx_output.value)) + else: + # for regular outputs, just subtract from the total amount + sum_tokens = token_info.amount + tx_output.value + token_dict[token_uid] = TokenInfo(sum_tokens, token_info.can_mint, token_info.can_melt) diff --git a/hathor/verification/verification_service.py b/hathor/verification/verification_service.py index 2a98cb662..f696e593d 100644 --- a/hathor/verification/verification_service.py +++ b/hathor/verification/verification_service.py @@ -12,15 +12,40 @@ # See the License for the specific language governing permissions and # limitations under the License. 
class VertexVerifiers(NamedTuple):
    """Tuple grouping one verifier instance per vertex type."""
    block: BlockVerifier
    merge_mined_block: MergeMinedBlockVerifier
    tx: TransactionVerifier
    token_creation_tx: TokenCreationTransactionVerifier

    @classmethod
    def create(cls, *, settings: HathorSettings) -> 'VertexVerifiers':
        """Build a VertexVerifiers where every verifier shares the same settings."""
        verifier_classes = dict(
            block=BlockVerifier,
            merge_mined_block=MergeMinedBlockVerifier,
            tx=TransactionVerifier,
            token_creation_tx=TokenCreationTransactionVerifier,
        )
        verifiers = {name: verifier_cls(settings=settings) for name, verifier_cls in verifier_classes.items()}
        return VertexVerifiers(**verifiers)
Should not modify the validation state.""" match vertex.version: - case TxVersion.REGULAR_BLOCK | TxVersion.MERGE_MINED_BLOCK: + case TxVersion.REGULAR_BLOCK: assert isinstance(vertex, Block) - block_verification.verify_basic(vertex, skip_block_weight_verification=skip_block_weight_verification) - case TxVersion.REGULAR_TRANSACTION | TxVersion.TOKEN_CREATION_TRANSACTION: + self.verifiers.block.verify_basic( + vertex, + skip_block_weight_verification=skip_block_weight_verification + ) + case TxVersion.MERGE_MINED_BLOCK: + assert isinstance(vertex, MergeMinedBlock) + self.verifiers.merge_mined_block.verify_basic( + vertex, + skip_block_weight_verification=skip_block_weight_verification + ) + case TxVersion.REGULAR_TRANSACTION: assert isinstance(vertex, Transaction) - transaction_verification.verify_basic(vertex) + self.verifiers.tx.verify_basic(vertex) + case TxVersion.TOKEN_CREATION_TRANSACTION: + assert isinstance(vertex, TokenCreationTransaction) + self.verifiers.token_creation_tx.verify_basic(vertex) case _: raise NotImplementedError @@ -84,15 +121,35 @@ def verify(self, vertex: BaseTransaction, *, reject_locked_reward: bool = True) Used by `self.validate_full`. 
def verify_without_storage(self, vertex: BaseTransaction) -> None:
    """Dispatch storage-free verification to the verifier matching the vertex's version."""
    version = vertex.version
    if version == TxVersion.REGULAR_BLOCK:
        assert isinstance(vertex, Block)
        self.verifiers.block.verify_without_storage(vertex)
    elif version == TxVersion.MERGE_MINED_BLOCK:
        assert isinstance(vertex, MergeMinedBlock)
        self.verifiers.merge_mined_block.verify_without_storage(vertex)
    elif version == TxVersion.REGULAR_TRANSACTION:
        assert isinstance(vertex, Transaction)
        self.verifiers.tx.verify_without_storage(vertex)
    elif version == TxVersion.TOKEN_CREATION_TRANSACTION:
        assert isinstance(vertex, TokenCreationTransaction)
        self.verifiers.token_creation_tx.verify_without_storage(vertex)
    else:
        # unknown TxVersion: mirror the original match statement's fallthrough
        raise NotImplementedError
# tx should have 2 parents, both other transactions
_TX_PARENTS_TXS = 2
_TX_PARENTS_BLOCKS = 0

# blocks have 3 parents, 2 txs and 1 block
_BLOCK_PARENTS_TXS = 2
_BLOCK_PARENTS_BLOCKS = 1


class VertexVerifier:
    """Verifications that apply to every vertex type (block or transaction)."""

    __slots__ = ('_settings', )

    def __init__(self, *, settings: HathorSettings):
        self._settings = settings

    def verify_parents(self, vertex: BaseTransaction) -> None:
        """All parents must exist and their timestamps must be smaller than ours.

        Also, txs should have 2 other txs as parents, while blocks should have 2 txs + 1 block.

        Parents must be ordered with blocks first, followed by transactions.

        :raises DuplicatedParents: when the same parent hash appears more than once
        :raises TimestampError: when our timestamp is less or equal than our parent's timestamp
        :raises ParentDoesNotExist: when at least one of our parents does not exist
        :raises IncorrectParents: when tx does not confirm the correct number/type of parent txs
        """
        from hathor.transaction.storage.exceptions import TransactionDoesNotExist

        assert vertex.storage is not None

        # check if parents are duplicated
        parents_set = set(vertex.parents)
        if len(vertex.parents) > len(parents_set):
            # BUGFIX: the format template was never applied (the list was passed as a
            # second exception argument instead of being formatted into the message)
            raise DuplicatedParents('Tx has duplicated parents: {}'.format(
                [tx_hash.hex() for tx_hash in vertex.parents]))

        my_parents_txs = 0      # number of tx parents
        my_parents_blocks = 0   # number of block parents
        min_timestamp: Optional[int] = None

        for parent_hash in vertex.parents:
            try:
                parent = vertex.storage.get_transaction(parent_hash)
                assert parent.hash is not None
                if vertex.timestamp <= parent.timestamp:
                    raise TimestampError('tx={} timestamp={}, parent={} timestamp={}'.format(
                        vertex.hash_hex,
                        vertex.timestamp,
                        parent.hash_hex,
                        parent.timestamp,
                    ))

                if parent.is_block:
                    if vertex.is_block and not parent.is_genesis:
                        if vertex.timestamp - parent.timestamp > self._settings.MAX_DISTANCE_BETWEEN_BLOCKS:
                            raise TimestampError('Distance between blocks is too big'
                                                 ' ({} seconds)'.format(vertex.timestamp - parent.timestamp))
                    if my_parents_txs > 0:
                        raise IncorrectParents('Parents which are blocks must come before transactions')
                    for pi_hash in parent.parents:
                        # BUGFIX: fetch the grandparent (pi_hash), not parent_hash again —
                        # fetching parent_hash always yielded a block, so min_timestamp
                        # was never updated from the parent block's tx-parents
                        pi = vertex.storage.get_transaction(pi_hash)
                        if not pi.is_block:
                            min_timestamp = (
                                min(min_timestamp, pi.timestamp) if min_timestamp is not None
                                else pi.timestamp
                            )
                    my_parents_blocks += 1
                else:
                    # tx parents must not be older than the tx-parents of our block parents
                    if min_timestamp and parent.timestamp < min_timestamp:
                        raise TimestampError('tx={} timestamp={}, parent={} timestamp={}, min_timestamp={}'.format(
                            vertex.hash_hex,
                            vertex.timestamp,
                            parent.hash_hex,
                            parent.timestamp,
                            min_timestamp
                        ))
                    my_parents_txs += 1
            except TransactionDoesNotExist:
                raise ParentDoesNotExist('tx={} parent={}'.format(vertex.hash_hex, parent_hash.hex()))

        # check for correct number of parents
        if vertex.is_block:
            parents_txs = _BLOCK_PARENTS_TXS
            parents_blocks = _BLOCK_PARENTS_BLOCKS
        else:
            parents_txs = _TX_PARENTS_TXS
            parents_blocks = _TX_PARENTS_BLOCKS
        if my_parents_blocks != parents_blocks:
            raise IncorrectParents('wrong number of parents (block type): {}, expecting {}'.format(
                my_parents_blocks, parents_blocks))
        if my_parents_txs != parents_txs:
            raise IncorrectParents('wrong number of parents (tx type): {}, expecting {}'.format(
                my_parents_txs, parents_txs))

    @classmethod
    def verify_pow(cls, vertex: BaseTransaction, *, override_weight: Optional[float] = None) -> None:
        """Verify proof-of-work

        :raises PowError: when the hash is equal or greater than the target
        """
        assert vertex.hash is not None
        numeric_hash = int(vertex.hash_hex, vertex.HEX_BASE)
        minimum_target = vertex.get_target(override_weight)
        if numeric_hash >= minimum_target:
            # BUGFIX: message previously printed '<', contradicting the failed condition
            raise PowError(f'Transaction has invalid data ({numeric_hash} >= {minimum_target})')

    def verify_outputs(self, vertex: BaseTransaction) -> None:
        """Verify there are no hathor authority UTXOs and outputs are all positive

        :raises InvalidToken: when there's a hathor authority utxo
        :raises InvalidOutputValue: output has negative value
        :raises TooManyOutputs: when there are too many outputs
        :raises InvalidOutputScriptSize: when an output script exceeds the maximum size
        """
        self.verify_number_of_outputs(vertex)
        for index, output in enumerate(vertex.outputs):
            # no hathor authority UTXO
            if (output.get_token_index() == 0) and output.is_token_authority():
                raise InvalidToken('Cannot have authority UTXO for hathor tokens: {}'.format(
                    output.to_human_readable()))

            # output value must be positive
            if output.value <= 0:
                raise InvalidOutputValue('Output value must be a positive integer. Value: {} and index: {}'.format(
                    output.value, index))

            if len(output.script) > self._settings.MAX_OUTPUT_SCRIPT_SIZE:
                raise InvalidOutputScriptSize('size: {} and max-size: {}'.format(
                    len(output.script), self._settings.MAX_OUTPUT_SCRIPT_SIZE
                ))

    def verify_number_of_outputs(self, vertex: BaseTransaction) -> None:
        """Verify number of outputs does not exceeds the limit"""
        if len(vertex.outputs) > self._settings.MAX_NUM_OUTPUTS:
            raise TooManyOutputs('Maximum number of outputs exceeded')

    def verify_sigops_output(self, vertex: BaseTransaction) -> None:
        """ Count sig operations on all outputs and verify that the total sum is below the limit
        """
        from hathor.transaction.scripts import get_sigops_count

        n_txops = sum(get_sigops_count(tx_output.script) for tx_output in vertex.outputs)

        if n_txops > self._settings.MAX_TX_SIGOPS_OUTPUT:
            raise TooManySigOps('TX[{}]: Maximum number of sigops for all outputs exceeded ({})'.format(
                vertex.hash_hex, n_txops))
def test_one_node(self): manager1 = self.create_peer() diff --git a/tests/tx/test_genesis.py b/tests/tx/test_genesis.py index a30759193..1db08b2de 100644 --- a/tests/tx/test_genesis.py +++ b/tests/tx/test_genesis.py @@ -1,6 +1,8 @@ from hathor.conf import HathorSettings from hathor.daa import TestMode, _set_test_mode, calculate_block_difficulty, minimum_tx_weight from hathor.transaction.storage import TransactionMemoryStorage +from hathor.verification.verification_service import VerificationService, VertexVerifiers +from hathor.verification.vertex_verifier import VertexVerifier from tests import unittest settings = HathorSettings() @@ -26,18 +28,20 @@ def get_genesis_output(): class GenesisTest(unittest.TestCase): def setUp(self): super().setUp() + verifiers = VertexVerifiers.create(settings=self._settings) + self._verification_service = VerificationService(verifiers=verifiers) self.storage = TransactionMemoryStorage() def test_pow(self): genesis = self.storage.get_all_genesis() for g in genesis: self.assertEqual(g.calculate_hash(), g.hash) - self.assertIsNone(g.verify_pow()) + self.assertIsNone(VertexVerifier.verify_pow(g)) def test_verify(self): genesis = self.storage.get_all_genesis() for g in genesis: - g.verify_without_storage() + self._verification_service.verify_without_storage(g) def test_output(self): # Test if block output is valid diff --git a/tests/tx/test_tx.py b/tests/tx/test_tx.py index 96cc51ce2..ae932dee2 100644 --- a/tests/tx/test_tx.py +++ b/tests/tx/test_tx.py @@ -30,6 +30,7 @@ from hathor.transaction.scripts import P2PKH, parse_address_script from hathor.transaction.util import int_to_bytes from hathor.transaction.validation_state import ValidationState +from hathor.verification.verification_service import VertexVerifiers from hathor.wallet import Wallet from tests import unittest from tests.utils import ( @@ -46,6 +47,7 @@ class BaseTransactionTest(unittest.TestCase): def setUp(self): super().setUp() + self._verifiers = 
VertexVerifiers.create(settings=self._settings) self.wallet = Wallet() # this makes sure we can spend the genesis outputs @@ -80,7 +82,7 @@ def test_input_output_match(self): _input.data = P2PKH.create_input_data(public_bytes, signature) with self.assertRaises(InputOutputMismatch): - tx.verify_sum() + self._verifiers.tx.verify_sum(tx) def test_validation(self): # add 100 blocks and check that walking through get_next_block_best_chain yields the same blocks @@ -120,7 +122,7 @@ def test_script(self): _input.data = data_wrong with self.assertRaises(InvalidInputData): - tx.verify_inputs() + self._verifiers.tx.verify_inputs(tx) def test_too_many_inputs(self): random_bytes = bytes.fromhex('0000184e64683b966b4268f387c269915cc61f6af5329823a93e3696cb0fe902') @@ -131,13 +133,13 @@ def test_too_many_inputs(self): tx = Transaction(inputs=inputs, storage=self.tx_storage) with self.assertRaises(TooManyInputs): - tx.verify_number_of_inputs() + self._verifiers.tx.verify_number_of_inputs(tx) def test_no_inputs(self): tx = Transaction(inputs=[], storage=self.tx_storage) with self.assertRaises(NoInputError): - tx.verify_number_of_inputs() + self._verifiers.tx.verify_number_of_inputs(tx) def test_too_many_outputs(self): random_bytes = bytes.fromhex('0000184e64683b966b4268f387c269915cc61f6af5329823a93e3696cb0fe902') @@ -148,7 +150,7 @@ def test_too_many_outputs(self): tx = Transaction(outputs=outputs, storage=self.tx_storage) with self.assertRaises(TooManyOutputs): - tx.verify_number_of_outputs() + self._verifiers.tx.verify_number_of_outputs(tx) def _gen_tx_spending_genesis_block(self): parents = [tx.hash for tx in self.genesis_txs] @@ -246,11 +248,11 @@ def test_merge_mined_no_magic(self): ) with self.assertRaises(AuxPowNoMagicError): - b.verify_aux_pow() + self._verifiers.merge_mined_block.verify_aux_pow(b) # adding the MAGIC_NUMBER makes it work: b.aux_pow = b.aux_pow._replace(coinbase_head=b.aux_pow.coinbase_head + MAGIC_NUMBER) - b.verify_aux_pow() + 
self._verifiers.merge_mined_block.verify_aux_pow(b) def test_merge_mined_multiple_magic(self): from hathor.merged_mining import MAGIC_NUMBER @@ -312,9 +314,9 @@ def test_merge_mined_multiple_magic(self): assert bytes(b1) != bytes(b2) assert b1.calculate_hash() == b2.calculate_hash() - b1.verify_aux_pow() # OK + self._verifiers.merge_mined_block.verify_aux_pow(b1) # OK with self.assertRaises(AuxPowUnexpectedMagicError): - b2.verify_aux_pow() + self._verifiers.merge_mined_block.verify_aux_pow(b2) def test_merge_mined_long_merkle_path(self): from hathor.merged_mining import MAGIC_NUMBER @@ -341,11 +343,11 @@ def test_merge_mined_long_merkle_path(self): ) with self.assertRaises(AuxPowLongMerklePathError): - b.verify_aux_pow() + self._verifiers.merge_mined_block.verify_aux_pow(b) # removing one path makes it work b.aux_pow.merkle_path.pop() - b.verify_aux_pow() + self._verifiers.merge_mined_block.verify_aux_pow(b) def test_block_outputs(self): from hathor.transaction.exceptions import TooManyOutputs @@ -365,7 +367,7 @@ def test_block_outputs(self): storage=self.tx_storage) with self.assertRaises(TooManyOutputs): - block.verify_outputs() + self._verifiers.block.verify_outputs(block) def test_tx_number_parents(self): genesis_block = self.genesis_blocks[0] @@ -534,7 +536,7 @@ def test_tx_weight_too_high(self): tx.weight += self._settings.MAX_TX_WEIGHT_DIFF + 0.1 tx.update_hash() with self.assertRaises(WeightError): - tx.verify_weight() + self._verifiers.tx.verify_weight(tx) def test_weight_nan(self): # this should succeed @@ -682,34 +684,34 @@ def test_tx_methods(self): self.assertFalse(tx_equal.is_genesis) # Pow error - tx2.verify_pow() + self._verifiers.tx.verify_pow(tx2) tx2.weight = 100 with self.assertRaises(PowError): - tx2.verify_pow() + self._verifiers.tx.verify_pow(tx2) # Verify parent timestamps - tx2.verify_parents() + self._verifiers.tx.verify_parents(tx2) tx2_timestamp = tx2.timestamp tx2.timestamp = 2 with self.assertRaises(TimestampError): - 
tx2.verify_parents() + self._verifiers.tx.verify_parents(tx2) tx2.timestamp = tx2_timestamp # Verify inputs timestamps - tx2.verify_inputs() + self._verifiers.tx.verify_inputs(tx2) tx2.timestamp = 2 with self.assertRaises(TimestampError): - tx2.verify_inputs() + self._verifiers.tx.verify_inputs(tx2) tx2.timestamp = tx2_timestamp # Validate maximum distance between blocks block = blocks[0] block2 = blocks[1] block2.timestamp = block.timestamp + self._settings.MAX_DISTANCE_BETWEEN_BLOCKS - block2.verify_parents() + self._verifiers.block.verify_parents(block2) block2.timestamp += 1 with self.assertRaises(TimestampError): - block2.verify_parents() + self._verifiers.block.verify_parents(block2) def test_block_big_nonce(self): block = self.genesis_blocks[0] @@ -886,7 +888,7 @@ def _test_txout_script_limit(self, offset): _output = TxOutput(value, script) tx = Transaction(inputs=[_input], outputs=[_output], storage=self.tx_storage) - tx.verify_outputs() + self._verifiers.tx.verify_outputs(tx) def test_txout_script_limit_exceeded(self): with self.assertRaises(InvalidOutputScriptSize): @@ -910,7 +912,7 @@ def _test_txin_data_limit(self, offset): outputs=[_output], storage=self.tx_storage ) - tx.verify_inputs(skip_script=True) + self._verifiers.tx.verify_inputs(tx, skip_script=True) def test_txin_data_limit_exceeded(self): with self.assertRaises(InvalidInputDataSize): @@ -1063,7 +1065,7 @@ def test_sigops_output_single_below_limit(self) -> None: output3 = TxOutput(value, hscript) tx = Transaction(inputs=[_input], outputs=[output3], storage=self.tx_storage) tx.update_hash() - tx.verify_sigops_output() + self._verifiers.tx.verify_sigops_output(tx) def test_sigops_output_multi_below_limit(self) -> None: genesis_block = self.genesis_blocks[0] @@ -1075,7 +1077,7 @@ def test_sigops_output_multi_below_limit(self) -> None: output4 = TxOutput(value, hscript) tx = Transaction(inputs=[_input], outputs=[output4]*num_outputs, storage=self.tx_storage) tx.update_hash() - 
tx.verify_sigops_output() + self._verifiers.tx.verify_sigops_output(tx) def test_sigops_input_single_above_limit(self) -> None: genesis_block = self.genesis_blocks[0] @@ -1117,7 +1119,7 @@ def test_sigops_input_single_below_limit(self) -> None: input3 = TxInput(genesis_block.hash, 0, hscript) tx = Transaction(inputs=[input3], outputs=[_output], storage=self.tx_storage) tx.update_hash() - tx.verify_sigops_input() + self._verifiers.tx.verify_sigops_input(tx) def test_sigops_input_multi_below_limit(self) -> None: genesis_block = self.genesis_blocks[0] @@ -1131,7 +1133,7 @@ def test_sigops_input_multi_below_limit(self) -> None: input4 = TxInput(genesis_block.hash, 0, hscript) tx = Transaction(inputs=[input4]*num_inputs, outputs=[_output], storage=self.tx_storage) tx.update_hash() - tx.verify_sigops_input() + self._verifiers.tx.verify_sigops_input(tx) def test_compare_bytes_equal(self) -> None: # create some block diff --git a/tests/tx/test_tx_deserialization.py b/tests/tx/test_tx_deserialization.py index 7e15598f3..12f2753f5 100644 --- a/tests/tx/test_tx_deserialization.py +++ b/tests/tx/test_tx_deserialization.py @@ -1,10 +1,16 @@ from hathor.transaction import Block, MergeMinedBlock, Transaction, TxVersion from hathor.transaction.token_creation_tx import TokenCreationTransaction +from hathor.verification.verification_service import VerificationService, VertexVerifiers from tests import unittest class _BaseTest: class _DeserializationTest(unittest.TestCase): + def setUp(self) -> None: + super().setUp() + verifiers = VertexVerifiers.create(settings=self._settings) + self._verification_service = VerificationService(verifiers=verifiers) + def test_deserialize(self): cls = self.get_tx_class() tx = cls.create_from_struct(self.tx_bytes) @@ -18,7 +24,7 @@ def verbose(key, value): cls = self.get_tx_class() tx = cls.create_from_struct(self.tx_bytes, verbose=verbose) - tx.verify_without_storage() + self._verification_service.verify_without_storage(tx) key, version = v[1] 
self.assertEqual(key, 'version') diff --git a/tests/wallet/test_wallet_hd.py b/tests/wallet/test_wallet_hd.py index 5c18648cb..fe0676630 100644 --- a/tests/wallet/test_wallet_hd.py +++ b/tests/wallet/test_wallet_hd.py @@ -1,6 +1,7 @@ from hathor.conf import HathorSettings from hathor.crypto.util import decode_address from hathor.transaction import Transaction +from hathor.verification.transaction_verifier import TransactionVerifier from hathor.wallet import HDWallet from hathor.wallet.base_wallet import WalletBalance, WalletInputInfo, WalletOutputInfo from hathor.wallet.exceptions import InsufficientFunds @@ -42,7 +43,7 @@ def test_transaction_and_balance(self): out = WalletOutputInfo(decode_address(new_address2), self.TOKENS, timelock=None) tx1 = self.wallet.prepare_transaction_compute_inputs(Transaction, [out], self.tx_storage) tx1.update_hash() - tx1.verify_script(tx1.inputs[0], block) + TransactionVerifier.verify_script(tx=tx1, input_tx=tx1.inputs[0], spent_tx=block) tx1.storage = self.tx_storage tx1.get_metadata().validation = ValidationState.FULL self.wallet.on_new_tx(tx1) @@ -62,7 +63,7 @@ def test_transaction_and_balance(self): tx2.storage = self.tx_storage tx2.update_hash() tx2.storage = self.tx_storage - tx2.verify_script(tx2.inputs[0], tx1) + TransactionVerifier.verify_script(tx=tx2, input_tx=tx2.inputs[0], spent_tx=tx1) tx2.get_metadata().validation = ValidationState.FULL self.tx_storage.save_transaction(tx2) self.wallet.on_new_tx(tx2)