diff --git a/hathor/builder/builder.py b/hathor/builder/builder.py index c689951f5..f52ccda59 100644 --- a/hathor/builder/builder.py +++ b/hathor/builder/builder.py @@ -590,10 +590,12 @@ def _get_or_create_verification_service(self) -> VerificationService: settings = self._get_or_create_settings() verifiers = self._get_or_create_vertex_verifiers() storage = self._get_or_create_tx_storage() + nc_storage_factory = self._get_or_create_nc_storage_factory() self._verification_service = VerificationService( settings=settings, verifiers=verifiers, tx_storage=storage, + nc_storage_factory=nc_storage_factory, ) return self._verification_service diff --git a/hathor/builder/cli_builder.py b/hathor/builder/cli_builder.py index e0bb66db5..eca6c9d68 100644 --- a/hathor/builder/cli_builder.py +++ b/hathor/builder/cli_builder.py @@ -300,6 +300,7 @@ def create_manager(self, reactor: Reactor) -> HathorManager: settings=settings, verifiers=vertex_verifiers, tx_storage=tx_storage, + nc_storage_factory=self.nc_storage_factory, ) cpu_mining_service = CpuMiningService() diff --git a/hathor/cli/mining.py b/hathor/cli/mining.py index 321cd4326..c4fa4ed90 100644 --- a/hathor/cli/mining.py +++ b/hathor/cli/mining.py @@ -142,7 +142,7 @@ def execute(args: Namespace) -> None: from hathor.verification.vertex_verifiers import VertexVerifiers settings = get_global_settings() daa = DifficultyAdjustmentAlgorithm(settings=settings) - verification_params = VerificationParams.default_for_mempool() + verification_params = VerificationParams(nc_block_root_id=None, enable_checkdatasig_count=True) verifiers = VertexVerifiers.create_defaults( reactor=Mock(), settings=settings, diff --git a/hathor/consensus/block_consensus.py b/hathor/consensus/block_consensus.py index 1307a1dd5..761734522 100644 --- a/hathor/consensus/block_consensus.py +++ b/hathor/consensus/block_consensus.py @@ -25,6 +25,7 @@ from hathor.consensus.context import ReorgInfo from hathor.feature_activation.feature import Feature from 
hathor.transaction import BaseTransaction, Block, Transaction +from hathor.transaction.exceptions import TokenNotFound from hathor.transaction.nc_execution_state import NCExecutionState from hathor.transaction.types import MetaNCCallRecord from hathor.util import classproperty @@ -37,6 +38,7 @@ from hathor.nanocontracts.nc_exec_logs import NCLogStorage from hathor.nanocontracts.runner import Runner from hathor.nanocontracts.runner.runner import RunnerFactory + from hathor.nanocontracts.storage import NCBlockStorage logger = get_logger() @@ -236,8 +238,17 @@ def _nc_execute_calls(self, block: Block, *, is_reorg: bool) -> None: runner = self._runner_factory.create(block_storage=block_storage, seed=seed_hasher.digest()) exception_and_tb: tuple[NCFail, str] | None = None + token_dict = tx.get_complete_token_info(block_storage) + should_verify_sum_after_execution = any(token_info.version is None for token_info in token_dict.values()) + try: runner.execute_from_tx(tx) + + # after the execution we have the latest state in the storage + # and at this point no tokens pending creation + if should_verify_sum_after_execution: + self._verify_sum_after_execution(tx, block_storage) + except NCFail as e: kwargs: dict[str, Any] = {} if tx.name: @@ -301,8 +312,21 @@ def _nc_execute_calls(self, block: Block, *, is_reorg: bool) -> None: case _: # pragma: no cover assert_never(tx_meta.nc_execution) + def _verify_sum_after_execution(self, tx: Transaction, block_storage: NCBlockStorage) -> None: + from hathor.nanocontracts import NCFail + from hathor.verification.transaction_verifier import TransactionVerifier + try: + token_dict = tx.get_complete_token_info(block_storage) + TransactionVerifier.verify_sum(self._settings, token_dict) + except TokenNotFound as e: + # At this point, any nonexistent token would have made a prior validation fail. For example, if there + # was a withdrawal of a nonexistent token, it would have failed in the balance validation before. 
+ raise AssertionError from e + except Exception as e: + raise NCFail from e + def nc_update_metadata(self, tx: Transaction, runner: 'Runner') -> None: - from hathor.nanocontracts.runner.types import CallType + from hathor.nanocontracts.runner.call_info import CallType meta = tx.get_metadata() assert meta.nc_execution == NCExecutionState.SUCCESS diff --git a/hathor/dag_builder/artifacts.py b/hathor/dag_builder/artifacts.py index 15cb517fa..32875fc82 100644 --- a/hathor/dag_builder/artifacts.py +++ b/hathor/dag_builder/artifacts.py @@ -85,7 +85,13 @@ def propagate_with( if new_relayed_vertex: assert manager.vertex_handler.on_new_relayed_vertex(vertex) else: - params = VerificationParams(enable_checkdatasig_count=True, enable_nano=True) + best_block = manager.tx_storage.get_best_block() + best_block_meta = best_block.get_metadata() + params = VerificationParams( + enable_checkdatasig_count=True, + enable_nano=True, + nc_block_root_id=best_block_meta.nc_block_root_id, + ) assert manager.vertex_handler._old_on_new_vertex(vertex, params) except Exception as e: raise Exception(f'failed on_new_tx({node.name})') from e diff --git a/hathor/indexes/manager.py b/hathor/indexes/manager.py index 30b9df4fb..ff9046004 100644 --- a/hathor/indexes/manager.py +++ b/hathor/indexes/manager.py @@ -216,11 +216,12 @@ def handle_contract_execution(self, tx: BaseTransaction) -> None: Update indexes according to a Nano Contract execution. Must be called only once for each time a contract is executed. 
""" - from hathor.nanocontracts.runner.types import ( + from hathor.nanocontracts.runner.index_records import ( + CreateContractRecord, + CreateTokenRecord, NCIndexUpdateRecord, - SyscallCreateContractRecord, - SyscallUpdateTokenRecord, UpdateAuthoritiesRecord, + UpdateTokenBalanceRecord, ) from hathor.nanocontracts.types import ContractId from hathor.transaction.nc_execution_state import NCExecutionState @@ -244,7 +245,7 @@ def handle_contract_execution(self, tx: BaseTransaction) -> None: created_contracts: set[ContractId] = set() for record in index_records: match record: - case SyscallCreateContractRecord(blueprint_id=blueprint_id, contract_id=contract_id): + case CreateContractRecord(blueprint_id=blueprint_id, contract_id=contract_id): assert contract_id not in created_contracts, f'contract {contract_id.hex()} created multiple times' assert contract_id != first_call.contract_id, ( f'contract {contract_id.hex()} cannot make a syscall to create itself' @@ -259,26 +260,20 @@ def handle_contract_execution(self, tx: BaseTransaction) -> None: if self.blueprint_history: self.blueprint_history.add_single_key(blueprint_id, tx) - case SyscallUpdateTokenRecord(): + case CreateTokenRecord(): + if self.tokens: + self.tokens.create_token_info_from_contract( + token_uid=record.token_uid, + name=record.token_name, + symbol=record.token_symbol, + version=record.token_version, + total=record.amount, + ) + + case UpdateTokenBalanceRecord(): # Minted/melted tokens are added/removed to/from the tokens index, # and the respective destroyed/created HTR too. if self.tokens: - try: - self.tokens.get_token_info(record.token_uid) - except KeyError: - # If the token doesn't exist in the index yet, it must be a token creation syscall. 
- from hathor.nanocontracts.runner.types import IndexUpdateRecordType - assert record.type == IndexUpdateRecordType.CREATE_TOKEN, record.type - assert record.token_name is not None and record.token_symbol is not None - assert record.token_version is not None - - self.tokens.create_token_info_from_contract( - token_uid=record.token_uid, - name=record.token_name, - symbol=record.token_symbol, - version=record.token_version - ) - self.tokens.add_to_total(record.token_uid, record.amount) case UpdateAuthoritiesRecord(): @@ -293,11 +288,12 @@ def handle_contract_unexecution(self, tx: BaseTransaction) -> None: Update indexes according to a Nano Contract unexecution, which happens when a reorg unconfirms a nano tx. Must be called only once for each time a contract is unexecuted. """ - from hathor.nanocontracts.runner.types import ( + from hathor.nanocontracts.runner.index_records import ( + CreateContractRecord, + CreateTokenRecord, NCIndexUpdateRecord, - SyscallCreateContractRecord, - SyscallUpdateTokenRecord, UpdateAuthoritiesRecord, + UpdateTokenBalanceRecord, ) from hathor.nanocontracts.types import NC_INITIALIZE_METHOD, ContractId @@ -314,13 +310,13 @@ def handle_contract_unexecution(self, tx: BaseTransaction) -> None: if self.nc_history and call.contract_id != first_call.contract_id: self.nc_history.remove_single_key(call.contract_id, tx) - # Accumulate all syscalls. + # Accumulate all index update records. 
records.extend(call.index_updates) created_contracts: set[ContractId] = set() for record in records: match record: - case SyscallCreateContractRecord(blueprint_id=blueprint_id, contract_id=contract_id): + case CreateContractRecord(blueprint_id=blueprint_id, contract_id=contract_id): assert contract_id not in created_contracts, f'contract {contract_id.hex()} created multiple times' assert contract_id != first_call.contract_id, ( f'contract {contract_id.hex()} cannot make a syscall to create itself' @@ -338,13 +334,13 @@ def handle_contract_unexecution(self, tx: BaseTransaction) -> None: if self.blueprint_history: self.blueprint_history.remove_single_key(blueprint_id, tx) - case SyscallUpdateTokenRecord(): + case CreateTokenRecord(): if self.tokens: - self.tokens.add_to_total(record.token_uid, -record.amount) + self.tokens.destroy_token(record.token_uid) - from hathor.nanocontracts.runner.types import IndexUpdateRecordType - if record.type == IndexUpdateRecordType.CREATE_TOKEN: - self.tokens.destroy_token(record.token_uid) + case UpdateTokenBalanceRecord(): + if self.tokens: + self.tokens.add_to_total(record.token_uid, -record.amount) case UpdateAuthoritiesRecord(): if self.tokens: diff --git a/hathor/indexes/rocksdb_tokens_index.py b/hathor/indexes/rocksdb_tokens_index.py index 2f2dc2482..3cb3935ff 100644 --- a/hathor/indexes/rocksdb_tokens_index.py +++ b/hathor/indexes/rocksdb_tokens_index.py @@ -28,7 +28,7 @@ to_internal_token_uid, ) from hathor.indexes.tokens_index import TokenIndexInfo, TokensIndex, TokenUtxoInfo -from hathor.nanocontracts.runner.types import UpdateAuthoritiesRecord, UpdateAuthoritiesRecordType +from hathor.nanocontracts.runner.index_records import IndexRecordType, UpdateAuthoritiesRecord from hathor.nanocontracts.types import ( NCAcquireAuthorityAction, NCDepositAction, @@ -270,7 +270,7 @@ def create_token_info_from_contract( name: str, symbol: str, version: TokenVersion, - total: int = 0, + total: int, ) -> None: self.create_token_info( 
token_uid=token_uid, @@ -515,13 +515,13 @@ def update_authorities_from_contract(self, record: UpdateAuthoritiesRecord, undo dict_info = self._get_value_info(record.token_uid) increment: int - match record.sub_type: - case UpdateAuthoritiesRecordType.GRANT: + match record.type: + case IndexRecordType.GRANT_AUTHORITIES: increment = 1 - case UpdateAuthoritiesRecordType.REVOKE: + case IndexRecordType.REVOKE_AUTHORITIES: increment = -1 case _: - assert_never(record.sub_type) + assert_never(record.type) if undo: increment *= -1 diff --git a/hathor/indexes/tokens_index.py b/hathor/indexes/tokens_index.py index ec3426912..52a8f3f5b 100644 --- a/hathor/indexes/tokens_index.py +++ b/hathor/indexes/tokens_index.py @@ -23,7 +23,7 @@ from hathor.transaction.token_info import TokenVersion if TYPE_CHECKING: - from hathor.nanocontracts.runner.types import UpdateAuthoritiesRecord + from hathor.nanocontracts.runner.index_records import UpdateAuthoritiesRecord SCOPE = Scope( include_blocks=False, @@ -157,7 +157,7 @@ def create_token_info_from_contract( name: str, symbol: str, version: TokenVersion, - total: int = 0, + total: int, ) -> None: """Create a token info for a new token created in a contract.""" raise NotImplementedError diff --git a/hathor/nanocontracts/balance_rules.py b/hathor/nanocontracts/balance_rules.py index c45df74f2..df3739d49 100644 --- a/hathor/nanocontracts/balance_rules.py +++ b/hathor/nanocontracts/balance_rules.py @@ -30,8 +30,7 @@ NCGrantAuthorityAction, NCWithdrawalAction, ) -from hathor.transaction.token_info import TokenInfoDict -from hathor.transaction.transaction import TokenInfo +from hathor.transaction.token_info import TokenInfoDict, TokenVersion T = TypeVar('T', bound=BaseAction) @@ -101,10 +100,13 @@ class _DepositRules(BalanceRules[NCDepositAction]): @override def verification_rule(self, token_dict: TokenInfoDict) -> None: - token_info = token_dict.get(self.action.token_uid, TokenInfo.get_default()) + token_info = token_dict[self.action.token_uid] 
token_info.amount = token_info.amount + self.action.amount token_dict[self.action.token_uid] = token_info + if token_info.version == TokenVersion.FEE: + token_info.chargeable_outputs += 1 + @override def nc_callee_execution_rule(self, callee_changes_tracker: NCChangesTracker) -> None: callee_changes_tracker.add_balance(self.action.token_uid, self.action.amount) @@ -125,10 +127,13 @@ class _WithdrawalRules(BalanceRules[NCWithdrawalAction]): @override def verification_rule(self, token_dict: TokenInfoDict) -> None: - token_info = token_dict.get(self.action.token_uid, TokenInfo.get_default()) + token_info = token_dict[self.action.token_uid] token_info.amount = token_info.amount - self.action.amount token_dict[self.action.token_uid] = token_info + if token_info.version == TokenVersion.FEE: + token_info.chargeable_inputs += 1 + @override def nc_callee_execution_rule(self, callee_changes_tracker: NCChangesTracker) -> None: callee_changes_tracker.add_balance(self.action.token_uid, -self.action.amount) @@ -150,7 +155,7 @@ class _GrantAuthorityRules(BalanceRules[NCGrantAuthorityAction]): @override def verification_rule(self, token_dict: TokenInfoDict) -> None: assert self.action.token_uid != HATHOR_TOKEN_UID - token_info = token_dict.get(self.action.token_uid, TokenInfo.get_default()) + token_info = token_dict[self.action.token_uid] if self.action.mint and not token_info.can_mint: raise NCInvalidAction( f'{self.action.name} token {self.action.token_uid.hex()} requires mint, but no input has it' @@ -202,7 +207,7 @@ class _AcquireAuthorityRules(BalanceRules[NCAcquireAuthorityAction]): @override def verification_rule(self, token_dict: TokenInfoDict) -> None: assert self.action.token_uid != HATHOR_TOKEN_UID - token_info = token_dict.get(self.action.token_uid, TokenInfo.get_default()) + token_info = token_dict[self.action.token_uid] token_info.can_mint = token_info.can_mint or self.action.mint token_info.can_melt = token_info.can_melt or self.action.melt 
token_dict[self.action.token_uid] = token_info diff --git a/hathor/nanocontracts/blueprint_env.py b/hathor/nanocontracts/blueprint_env.py index 6845d7051..3c41b0c41 100644 --- a/hathor/nanocontracts/blueprint_env.py +++ b/hathor/nanocontracts/blueprint_env.py @@ -17,7 +17,6 @@ from typing import TYPE_CHECKING, Any, Collection, Sequence, TypeAlias, final from hathor.conf.settings import HATHOR_TOKEN_UID -from hathor.nanocontracts.storage import NCContractStorage from hathor.nanocontracts.types import Amount, BlueprintId, ContractId, NCAction, NCFee, TokenUid if TYPE_CHECKING: @@ -27,6 +26,7 @@ from hathor.nanocontracts.proxy_accessor import ProxyAccessor from hathor.nanocontracts.rng import NanoRNG from hathor.nanocontracts.runner import Runner + from hathor.nanocontracts.storage import NCContractStorage NCAttrCache: TypeAlias = dict[bytes, Any] | None diff --git a/hathor/nanocontracts/nc_types/token_version_nc_type.py b/hathor/nanocontracts/nc_types/token_version_nc_type.py new file mode 100644 index 000000000..bf9300292 --- /dev/null +++ b/hathor/nanocontracts/nc_types/token_version_nc_type.py @@ -0,0 +1,26 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from typing_extensions import override + +from hathor.nanocontracts.nc_types.sized_int_nc_type import Uint8NCType +from hathor.serialization import Deserializer +from hathor.transaction.token_info import TokenVersion + + +class TokenVersionNCType(Uint8NCType): + @override + def _deserialize(self, deserializer: Deserializer, /) -> TokenVersion: + value = super()._deserialize(deserializer) + return TokenVersion(value) diff --git a/hathor/nanocontracts/runner/types.py b/hathor/nanocontracts/runner/call_info.py similarity index 50% rename from hathor/nanocontracts/runner/types.py rename to hathor/nanocontracts/runner/call_info.py index 67477e466..ecd187e61 100644 --- a/hathor/nanocontracts/runner/types.py +++ b/hathor/nanocontracts/runner/call_info.py @@ -16,15 +16,13 @@ from dataclasses import dataclass, field from enum import StrEnum, auto, unique -from typing import TYPE_CHECKING, Any, TypeAlias - -from typing_extensions import Literal, Self, assert_never +from typing import TYPE_CHECKING, Any from hathor.nanocontracts.context import Context from hathor.nanocontracts.exception import NCNumberOfCallsExceeded, NCRecursionError +from hathor.nanocontracts.runner.index_records import NCIndexUpdateRecord from hathor.nanocontracts.storage import NCChangesTracker, NCContractStorage -from hathor.nanocontracts.types import BlueprintId, ContractId, TokenUid, VertexId -from hathor.transaction.token_info import TokenVersion +from hathor.nanocontracts.types import BlueprintId, ContractId if TYPE_CHECKING: from hathor.nanocontracts.nc_exec_logs import NCLogger @@ -36,141 +34,6 @@ class CallType(StrEnum): VIEW = auto() -@unique -class IndexUpdateRecordType(StrEnum): - CREATE_CONTRACT = auto() - MINT_TOKENS = auto() - MELT_TOKENS = auto() - CREATE_TOKEN = auto() - UPDATE_AUTHORITIES = auto() - - -@dataclass(slots=True, frozen=True, kw_only=True) -class SyscallCreateContractRecord: - blueprint_id: BlueprintId - contract_id: ContractId - - def to_json(self) -> dict[str, Any]: - 
return dict( - type=IndexUpdateRecordType.CREATE_CONTRACT, - blueprint_id=self.blueprint_id.hex(), - contract_id=self.contract_id.hex(), - ) - - @classmethod - def from_json(cls, json_dict: dict[str, Any]) -> Self: - assert json_dict['type'] == IndexUpdateRecordType.CREATE_CONTRACT - return cls( - contract_id=ContractId(VertexId(bytes.fromhex(json_dict['contract_id']))), - blueprint_id=BlueprintId(VertexId(bytes.fromhex(json_dict['blueprint_id']))), - ) - - -@dataclass(slots=True, frozen=True, kw_only=True) -class SyscallUpdateTokenRecord: - """Record for token balance updates in syscalls. - - This record represents a single token operation (mint, melt, or create). - Each syscall may generate multiple records (e.g., main token + fee payment token). - """ - token_uid: TokenUid - amount: int - type: ( - Literal[IndexUpdateRecordType.MINT_TOKENS] - | Literal[IndexUpdateRecordType.MELT_TOKENS] - | Literal[IndexUpdateRecordType.CREATE_TOKEN] - ) - # Optional fields used for CREATE_TOKEN operations - token_symbol: str | None = None - token_name: str | None = None - token_version: TokenVersion | None = None - - def to_json(self) -> dict[str, Any]: - return dict( - type=self.type, - token_uid=self.token_uid.hex(), - amount=self.amount, - token_name=self.token_name, - token_symbol=self.token_symbol, - token_version=self.token_version, - ) - - @classmethod - def from_json(cls, json_dict: dict[str, Any]) -> Self: - valid_types = ( - IndexUpdateRecordType.MINT_TOKENS, IndexUpdateRecordType.MELT_TOKENS, IndexUpdateRecordType.CREATE_TOKEN - ) - assert json_dict['type'] in valid_types - return cls( - type=json_dict['type'], - token_uid=TokenUid(VertexId(bytes.fromhex(json_dict['token_uid']))), - amount=json_dict['amount'], - token_version=json_dict.get('token_version'), - token_name=json_dict.get('token_name'), - token_symbol=json_dict.get('token_symbol'), - ) - - -@unique -class UpdateAuthoritiesRecordType(StrEnum): - GRANT = auto() - REVOKE = auto() - - -@dataclass(slots=True, 
frozen=True, kw_only=True) -class UpdateAuthoritiesRecord: - token_uid: TokenUid - sub_type: UpdateAuthoritiesRecordType - mint: bool - melt: bool - - def __post_init__(self) -> None: - assert self.mint or self.melt - - def to_json(self) -> dict[str, Any]: - return dict( - type=IndexUpdateRecordType.UPDATE_AUTHORITIES, - token_uid=self.token_uid.hex(), - sub_type=self.sub_type, - mint=self.mint, - melt=self.melt, - ) - - @classmethod - def from_json(cls, json_dict: dict[str, Any]) -> Self: - assert json_dict['type'] == IndexUpdateRecordType.UPDATE_AUTHORITIES - return cls( - token_uid=TokenUid(VertexId(bytes.fromhex(json_dict['token_uid']))), - sub_type=UpdateAuthoritiesRecordType(json_dict['sub_type']), - mint=json_dict['mint'], - melt=json_dict['melt'], - ) - - -NCIndexUpdateRecord: TypeAlias = ( - SyscallCreateContractRecord | - SyscallUpdateTokenRecord | - UpdateAuthoritiesRecord -) - - -def nc_index_update_record_from_json(json_dict: dict[str, Any]) -> NCIndexUpdateRecord: - syscall_type = IndexUpdateRecordType(json_dict['type']) - match syscall_type: - case IndexUpdateRecordType.CREATE_CONTRACT: - return SyscallCreateContractRecord.from_json(json_dict) - case ( - IndexUpdateRecordType.MINT_TOKENS - | IndexUpdateRecordType.MELT_TOKENS - | IndexUpdateRecordType.CREATE_TOKEN - ): - return SyscallUpdateTokenRecord.from_json(json_dict) - case IndexUpdateRecordType.UPDATE_AUTHORITIES: - return UpdateAuthoritiesRecord.from_json(json_dict) - case _: - raise assert_never(f'invalid syscall record type: "{syscall_type}"') - - @dataclass(slots=True, frozen=True, kw_only=True) class CallRecord: """This object keeps information about a single call between contracts.""" diff --git a/hathor/nanocontracts/runner/index_records.py b/hathor/nanocontracts/runner/index_records.py new file mode 100644 index 000000000..2b331be99 --- /dev/null +++ b/hathor/nanocontracts/runner/index_records.py @@ -0,0 +1,121 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, 
Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import annotations + +from dataclasses import dataclass +from enum import StrEnum, auto, unique +from typing import Any, TypeAlias + +from typing_extensions import Literal, assert_never + +from hathor.nanocontracts.nc_types import NCType +from hathor.nanocontracts.nc_types.dataclass_nc_type import make_dataclass_nc_type +from hathor.nanocontracts.types import BlueprintId, ContractId, TokenUid +from hathor.transaction.token_info import TokenVersion + + +@unique +class IndexRecordType(StrEnum): + CREATE_CONTRACT = auto() + CREATE_TOKEN = auto() + UPDATE_TOKEN_BALANCE = auto() + GRANT_AUTHORITIES = auto() + REVOKE_AUTHORITIES = auto() + + +@dataclass(slots=True, frozen=True, kw_only=True) +class CreateContractRecord: + """Record for contract creation.""" + type: Literal[IndexRecordType.CREATE_CONTRACT] + blueprint_id: BlueprintId + contract_id: ContractId + + def __post_init__(self) -> None: + assert self.type == IndexRecordType.CREATE_CONTRACT + + +@dataclass(slots=True, frozen=True, kw_only=True) +class CreateTokenRecord: + """Record for token creation.""" + type: Literal[IndexRecordType.CREATE_TOKEN] + token_uid: TokenUid + amount: int + token_symbol: str + token_name: str + token_version: Literal[TokenVersion.DEPOSIT] | Literal[TokenVersion.FEE] + + def __post_init__(self) -> None: + assert self.type == IndexRecordType.CREATE_TOKEN + assert self.token_version in (TokenVersion.DEPOSIT, TokenVersion.FEE) + assert self.amount > 0 + + 
+@dataclass(slots=True, frozen=True, kw_only=True) +class UpdateTokenBalanceRecord: + """Record for token balance updates.""" + type: Literal[IndexRecordType.UPDATE_TOKEN_BALANCE] + token_uid: TokenUid + amount: int + + def __post_init__(self) -> None: + assert self.type == IndexRecordType.UPDATE_TOKEN_BALANCE + + +@dataclass(slots=True, frozen=True, kw_only=True) +class UpdateAuthoritiesRecord: + """Record for token authority updates.""" + type: Literal[IndexRecordType.GRANT_AUTHORITIES] | Literal[IndexRecordType.REVOKE_AUTHORITIES] + token_uid: TokenUid + mint: bool + melt: bool + + def __post_init__(self) -> None: + assert self.type in (IndexRecordType.GRANT_AUTHORITIES, IndexRecordType.REVOKE_AUTHORITIES) + assert self.mint or self.melt + + +NCIndexUpdateRecord: TypeAlias = ( + CreateContractRecord | CreateTokenRecord | UpdateTokenBalanceRecord | UpdateAuthoritiesRecord +) + +CreateContractRecordNCType = make_dataclass_nc_type(CreateContractRecord) +CreateTokenRecordNCType = make_dataclass_nc_type(CreateTokenRecord) +UpdateTokenBalanceRecordNCType = make_dataclass_nc_type(UpdateTokenBalanceRecord) +UpdateAuthoritiesRecordNCType = make_dataclass_nc_type(UpdateAuthoritiesRecord) + + +def _get_nc_type(record_type: IndexRecordType) -> NCType: + match record_type: + case IndexRecordType.CREATE_CONTRACT: + return CreateContractRecordNCType + case IndexRecordType.CREATE_TOKEN: + return CreateTokenRecordNCType + case IndexRecordType.UPDATE_TOKEN_BALANCE: + return UpdateTokenBalanceRecordNCType + case IndexRecordType.GRANT_AUTHORITIES | IndexRecordType.REVOKE_AUTHORITIES: + return UpdateAuthoritiesRecordNCType + case _: + assert_never(record_type) + + +def nc_index_update_record_from_json(json_dict: dict[str, Any]) -> NCIndexUpdateRecord: + record_type = IndexRecordType(json_dict['type']) + nc_type = _get_nc_type(record_type) + return nc_type.json_to_value(json_dict) + + +def nc_index_update_record_to_json(record: NCIndexUpdateRecord) -> dict: + nc_type = 
_get_nc_type(record.type) + return nc_type.value_to_json(record) diff --git a/hathor/nanocontracts/runner/runner.py b/hathor/nanocontracts/runner/runner.py index bb4e1d471..9cbe45207 100644 --- a/hathor/nanocontracts/runner/runner.py +++ b/hathor/nanocontracts/runner/runner.py @@ -21,7 +21,8 @@ from typing_extensions import assert_never from hathor.conf.settings import HATHOR_TOKEN_UID, HathorSettings from hathor.nanocontracts.balance_rules import BalanceRules from hathor.nanocontracts.blueprint import Blueprint from hathor.nanocontracts.blueprint_env import BlueprintEnvironment @@ -47,18 +48,16 @@ from hathor.nanocontracts.metered_exec import MeteredExecutor from hathor.nanocontracts.method import Method, ReturnOnly from hathor.nanocontracts.rng import NanoRNG -from hathor.nanocontracts.runner.types import ( - CallInfo, - CallRecord, - CallType, - SyscallCreateContractRecord, - SyscallUpdateTokenRecord, +from hathor.nanocontracts.runner.call_info import CallInfo, CallRecord, CallType +from hathor.nanocontracts.runner.index_records import ( + CreateContractRecord, + CreateTokenRecord, + IndexRecordType, UpdateAuthoritiesRecord, - UpdateAuthoritiesRecordType, + UpdateTokenBalanceRecord, ) from hathor.nanocontracts.storage import NCBlockStorage, NCChangesTracker, NCContractStorage, NCStorageFactory from hathor.nanocontracts.storage.contract_storage import Balance -from hathor.nanocontracts.syscall_token_balance_rules import TokenSyscallBalanceRules +from hathor.nanocontracts.token_fees import calculate_melt_fee, calculate_mint_fee from hathor.nanocontracts.types import ( NC_ALLOW_REENTRANCY, NC_ALLOWED_ACTIONS_ATTR, @@ -437,7 +436,14 @@ def _unsafe_call_another_contract_public_method( # Update the balances with the fee payment amount. 
Since some tokens could be created during contract # execution, the verification of the tokens and amounts will be done after it for fee in fees: - previous_changes_tracker.add_balance(fee.token_uid, -fee.amount) + assert fee.amount > 0 + self._update_tokens_amount([ + UpdateTokenRecord( + token_uid=fee.token_uid, + amount=-fee.amount, + type=IndexUpdateRecordType.MELT_TOKENS + ) + ]) + self._register_paid_fee(fee.token_uid, fee.amount) ctx_actions = Context.__group_actions__(actions) @@ -501,10 +507,10 @@ def _validate_balances(self, ctx: Context) -> None: continue for record in call.index_updates: match record: - case SyscallCreateContractRecord() | UpdateAuthoritiesRecord(): + case CreateContractRecord() | UpdateAuthoritiesRecord(): # Nothing to do here. pass - case SyscallUpdateTokenRecord(): + case CreateTokenRecord() | UpdateTokenBalanceRecord(): calculated_tokens_totals[record.token_uid] += record.amount case _: # pragma: no cover assert_never(record) @@ -534,10 +540,6 @@ def _validate_balances(self, ctx: Context) -> None: case _: # pragma: no cover assert_never(action) - # Account for fees paid during execution - for fee_token_uid, amount in self._paid_actions_fees.items(): - total_diffs[fee_token_uid] += amount - assert all(diff == 0 for diff in total_diffs.values()), ( f'change tracker diffs do not match actions: {total_diffs}' ) @@ -799,7 +801,7 @@ def _get_balance( ) -> Balance: """Internal implementation of get_balance.""" if token_uid is None: - token_uid = TokenUid(HATHOR_TOKEN_UID) + token_uid = HATHOR_TOKEN_UID storage: NCContractStorage if self._call_info is not None and contract_id == self.get_current_contract_id(): @@ -928,7 +930,7 @@ def syscall_create_another_contract( ) assert last_call_record.index_updates is not None - syscall_record = SyscallCreateContractRecord(blueprint_id=blueprint_id, contract_id=child_id) + syscall_record = CreateContractRecord(type=IndexRecordType.CREATE_CONTRACT, blueprint_id=blueprint_id, contract_id=child_id) last_call_record.index_updates.append(syscall_record) return child_id, 
ret @@ -971,7 +973,7 @@ def syscall_mint_tokens( *, token_uid: TokenUid, amount: int, - fee_payment_token: TokenUid = TokenUid(HATHOR_TOKEN_UID) + fee_payment_token: TokenUid = HATHOR_TOKEN_UID ) -> None: """Mint tokens and adds them to the balance of this nano contract. The tokens should be already created otherwise it will raise. @@ -990,14 +992,13 @@ def syscall_mint_tokens( if not balance.can_mint: raise NCInvalidSyscall(f'contract {call_record.contract_id.hex()} cannot mint {token_uid.hex()} tokens') - fee_payment_token_info = self._get_token(fee_payment_token) token_info = self._get_token(token_uid) - - syscall_rules = TokenSyscallBalanceRules.get_rules(token_uid, token_info.token_version, self._settings) - syscall_balance = syscall_rules.mint(amount, fee_payment_token=fee_payment_token_info) - records = syscall_rules.get_syscall_update_token_records(syscall_balance) - - self._update_tokens_amount(records) + self._mint_tokens( + token_version=token_info.token_version, + token_uid=TokenUid(token_info.token_id), + amount=amount, + fee_payment_token=self._get_token(fee_payment_token), + ) @_forbid_syscall_from_view('melt_tokens') def syscall_melt_tokens( @@ -1005,7 +1006,7 @@ def syscall_melt_tokens( *, token_uid: TokenUid, amount: int, - fee_payment_token: TokenUid = TokenUid(HATHOR_TOKEN_UID) + fee_payment_token: TokenUid = HATHOR_TOKEN_UID ) -> None: """Melt tokens by removing them from the balance of this nano contract. The tokens should be already created otherwise it will raise. 
@@ -1025,13 +1026,12 @@ def syscall_melt_tokens( raise NCInvalidSyscall(f'contract {call_record.contract_id.hex()} cannot melt {token_uid.hex()} tokens') token_info = self._get_token(token_uid) - fee_payment_token_info = self._get_token(fee_payment_token) - - syscall_rules = TokenSyscallBalanceRules.get_rules(token_uid, token_info.token_version, self._settings) - syscall_balance = syscall_rules.melt(amount, fee_payment_token=fee_payment_token_info) - records = syscall_rules.get_syscall_update_token_records(syscall_balance) - - self._update_tokens_amount(records) + self._melt_tokens( + token_version=token_info.token_version, + token_uid=TokenUid(token_info.token_id), + amount=amount, + fee_payment_token=self._get_token(fee_payment_token), + ) def _validate_context(self, ctx: Context) -> None: """Check whether the context is valid.""" @@ -1111,17 +1111,14 @@ def syscall_create_child_deposit_token( grant_melt=melt_authority, ) - syscall_rules = TokenSyscallBalanceRules.get_rules(token_id, token_version, self._settings) - syscall_balance = syscall_rules.create_token( + self._create_token( + token_version=token_version, token_uid=token_id, - token_symbol=token_symbol, - token_name=token_name, amount=amount, - fee_payment_token=self._get_token(TokenUid(HATHOR_TOKEN_UID)) + fee_payment_token=self._get_token(HATHOR_TOKEN_UID), + token_name=token_name, + token_symbol=token_symbol, ) - records = syscall_rules.get_syscall_update_token_records(syscall_balance) - - self._update_tokens_amount(records) return token_id @@ -1151,7 +1148,6 @@ def syscall_create_child_fee_token( parent_id = call_record.contract_id cleaned_token_symbol = clean_token_string(token_symbol) - fee_payment_token_info = self._get_token(fee_payment_token) token_id = derive_child_token_id(parent_id, cleaned_token_symbol, salt=salt) token_version = TokenVersion.FEE @@ -1167,17 +1163,15 @@ def syscall_create_child_fee_token( grant_mint=mint_authority, grant_melt=melt_authority, ) - syscall_rules = 
TokenSyscallBalanceRules.get_rules(token_id, token_version, self._settings) - syscall_balance = syscall_rules.create_token( + + self._create_token( + token_version=token_version, token_uid=token_id, + amount=amount, + fee_payment_token=self._get_token(fee_payment_token), token_symbol=token_symbol, token_name=token_name, - amount=amount, - fee_payment_token=fee_payment_token_info ) - records = syscall_rules.get_syscall_update_token_records(syscall_balance) - - self._update_tokens_amount(records) return token_id @@ -1246,10 +1240,99 @@ def _get_token(self, token_uid: TokenUid) -> TokenDescription: token_id=token_creation_tx.hash ) - def _update_tokens_amount( + def _create_token( self, - records: list[SyscallUpdateTokenRecord] + *, + token_version: TokenVersion, + token_uid: TokenUid, + amount: int, + fee_payment_token: TokenDescription, + token_symbol: str, + token_name: str, ) -> None: + """Create a new token.""" + fee_amount = calculate_mint_fee( + settings=self._settings, + token_version=token_version, + amount=amount, + fee_payment_token=fee_payment_token, + ) + assert amount > 0 and fee_amount < 0 + + record = UpdateTokenRecord( + type=IndexUpdateRecordType.CREATE_TOKEN, + token_uid=token_uid, + amount=amount, + fee_token_uid=TokenUid(fee_payment_token.token_id), + token_version=token_version, + token_symbol=token_symbol, + token_name=token_name, + fee_amount=fee_amount, + ) + self._update_tokens_amount(record) + + def _mint_tokens( + self, + *, + token_version: TokenVersion, + token_uid: TokenUid, + amount: int, + fee_payment_token: TokenDescription, + ) -> None: + """Mint tokens.""" + fee_amount = calculate_mint_fee( + settings=self._settings, + token_version=token_version, + amount=amount, + fee_payment_token=fee_payment_token, + ) + assert amount > 0 and fee_amount < 0 + + record = UpdateTokenRecord( + type=IndexUpdateRecordType.MINT_TOKENS, + token_uid=token_uid, + amount=amount, + fee_token_uid=TokenUid(fee_payment_token.token_id), + 
fee_amount=fee_amount, + ) + self._update_tokens_amount(record) + + def _melt_tokens( + self, + *, + token_version: TokenVersion, + token_uid: TokenUid, + amount: int, + fee_payment_token: TokenDescription, + ) -> None: + """Melt tokens.""" + fee_amount = calculate_melt_fee( + settings=self._settings, + token_version=token_version, + amount=amount, + fee_payment_token=fee_payment_token, + ) + assert amount > 0 + match token_version: + case TokenVersion.NATIVE: + raise AssertionError + case TokenVersion.DEPOSIT: + assert fee_amount > 0 + case TokenVersion.FEE: + assert fee_amount < 0 + case _: # pragma: no cover + assert_never(token_version) + + record = UpdateTokenRecord( + type=IndexUpdateRecordType.MELT_TOKENS, + token_uid=token_uid, + amount=-amount, + fee_token_uid=TokenUid(fee_payment_token.token_id), + fee_amount=fee_amount, + ) + self._update_tokens_amount(record) + + def _update_tokens_amount(self, records: list[UpdateTokenRecord]) -> None: """ Update token balances and create index records for a token operation. 
diff --git a/hathor/nanocontracts/storage/block_storage.py b/hathor/nanocontracts/storage/block_storage.py index 106f44d27..36ecf0621 100644 --- a/hathor/nanocontracts/storage/block_storage.py +++ b/hathor/nanocontracts/storage/block_storage.py @@ -19,7 +19,7 @@ from hathor.nanocontracts.exception import NanoContractDoesNotExist from hathor.nanocontracts.nc_types.dataclass_nc_type import make_dataclass_nc_type -from hathor.nanocontracts.nc_types.sized_int_nc_type import Uint8NCType +from hathor.nanocontracts.nc_types.token_version_nc_type import TokenVersionNCType from hathor.nanocontracts.storage.contract_storage import NCContractStorage from hathor.nanocontracts.storage.patricia_trie import NodeId, PatriciaTrie from hathor.nanocontracts.storage.token_proxy import TokenProxy @@ -64,7 +64,7 @@ class NCBlockStorage: _TOKEN_DESCRIPTION_NC_TYPE = make_dataclass_nc_type( TokenDescription, extra_nc_types_map={ - TokenVersion: Uint8NCType, + TokenVersion: TokenVersionNCType, }, ) diff --git a/hathor/nanocontracts/syscall_token_balance_rules.py b/hathor/nanocontracts/syscall_token_balance_rules.py deleted file mode 100644 index 567fe27b1..000000000 --- a/hathor/nanocontracts/syscall_token_balance_rules.py +++ /dev/null @@ -1,368 +0,0 @@ -# Copyright 2025 Hathor Labs -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from __future__ import annotations - -from abc import ABC, abstractmethod -from dataclasses import dataclass -from enum import StrEnum, auto, unique -from typing import TYPE_CHECKING - -from typing_extensions import Literal, assert_never - -from hathor.conf.settings import HATHOR_TOKEN_UID, HathorSettings -from hathor.nanocontracts.exception import NCInvalidFeePaymentToken -from hathor.nanocontracts.runner.types import IndexUpdateRecordType -from hathor.nanocontracts.types import TokenUid -from hathor.transaction.token_info import TokenDescription, TokenVersion -from hathor.transaction.util import get_deposit_token_deposit_amount, get_deposit_token_withdraw_amount - -if TYPE_CHECKING: - from hathor.nanocontracts.runner.types import SyscallUpdateTokenRecord - - -@unique -class TokenOperationType(StrEnum): - """Types of token operations for syscalls.""" - CREATE = auto() - MINT = auto() - MELT = auto() - - -def to_index_update_type(op_type: TokenOperationType) -> ( - Literal[IndexUpdateRecordType.MINT_TOKENS] - | Literal[IndexUpdateRecordType.MELT_TOKENS] - | Literal[IndexUpdateRecordType.CREATE_TOKEN] -): - """Convert TokenOperationType to IndexUpdateRecordType for compatibility.""" - match op_type: - case TokenOperationType.CREATE: - return IndexUpdateRecordType.CREATE_TOKEN - case TokenOperationType.MINT: - return IndexUpdateRecordType.MINT_TOKENS - case TokenOperationType.MELT: - return IndexUpdateRecordType.MELT_TOKENS - case _: - assert_never(op_type) - - -@dataclass(slots=True, kw_only=True) -class TokenSyscallBalanceEntry: - token_uid: TokenUid - amount: int - - -@dataclass(slots=True, kw_only=True) -class TokenSyscallBalance: - type: TokenOperationType - token: TokenSyscallBalanceEntry - fee_payment: TokenSyscallBalanceEntry - # create token syscall - token_version: TokenVersion | None = None - token_symbol: str | None = None - token_name: str | None = None - - def to_syscall_records(self) -> list['SyscallUpdateTokenRecord']: - """ - Convert 
TokenSyscallBalance to a list of SyscallUpdateTokenRecord. - - Each operation generates two records: - 1. Main token operation (mint/melt/create) - 2. Fee payment token operation - - Returns: - A list with two SyscallUpdateTokenRecord instances - """ - from hathor.nanocontracts.runner.types import SyscallUpdateTokenRecord - - operation_type = to_index_update_type(self.type) - - # First record: main token operation - main_token_record = SyscallUpdateTokenRecord( - type=operation_type, - token_uid=self.token.token_uid, - amount=self.token.amount, - token_version=self.token_version, - token_symbol=self.token_symbol, - token_name=self.token_name, - ) - - # Second record: fee payment token - fee_payment_record = SyscallUpdateTokenRecord( - type=operation_type, - token_uid=self.fee_payment.token_uid, - amount=self.fee_payment.amount, - ) - - return [main_token_record, fee_payment_record] - - -class TokenSyscallBalanceRules(ABC): - """ - An abstract base class that unifies token balance rules for syscalls. - - Requires definitions for create tokens, mint, and melt syscalls. - """ - - __slots__ = ('_settings', 'token_version', 'token_uid') - - def __init__( - self, - settings: HathorSettings, - token_uid: TokenUid, - token_version: TokenVersion - ) -> None: - self._settings = settings - self.token_version = token_version - self.token_uid = token_uid - - assert token_uid != TokenUid(HATHOR_TOKEN_UID) - assert token_version is not TokenVersion.NATIVE - - @abstractmethod - def create_token( - self, - *, - token_uid: TokenUid, - token_symbol: str, - token_name: str, - amount: int, - fee_payment_token: TokenDescription - ) -> TokenSyscallBalance: - """ - Calculate and return the token amounts needed for token creation syscalls. 
- - Returns: - `TokenSyscallBalance` with the token data and the amounts - """ - raise NotImplementedError - - @abstractmethod - def mint(self, amount: int, *, fee_payment_token: TokenDescription) -> TokenSyscallBalance: - """ - Calculate and return the token amounts needed for minting operations. - - Args: - amount: The amount to be minted. - fee_payment_token: The token that will be used to pay fees - - Returns: - TokenSyscallBalance: A data class with the current syscall record type, token UIDs, and - their respective amounts that will be used by the Runner class for balance updates during token minting. - """ - raise NotImplementedError - - @abstractmethod - def melt(self, amount: int, *, fee_payment_token: TokenDescription) -> TokenSyscallBalance: - """ - Calculate and return the token amounts needed for melting operations. - - Args: - amount: The amount to be melted. - fee_payment_token: The token that will be used to pay fees - - Returns: - TokenSyscallBalance: A data class with the current syscall record type, token UIDs, and - their respective amounts that will be used by the Runner class for balance updates during token melting. - """ - raise NotImplementedError - - @abstractmethod - def get_syscall_update_token_records( - self, - syscall_balance: TokenSyscallBalance - ) -> list['SyscallUpdateTokenRecord']: - """ - Create syscall update records for the given token operation. - - This method transforms a TokenSyscallBalance into a list of SyscallUpdateTokenRecord - that will be appended to the call record's index_updates for tracking token operations. - - Args: - syscall_balance: The token balance operation containing operation type, - token amounts, and payment details. - - Returns: - A list of syscall update records (main token + fee payment). 
- """ - raise NotImplementedError - - @staticmethod - def get_rules( - token_uid: TokenUid, - token_version: TokenVersion, - settings: HathorSettings - ) -> TokenSyscallBalanceRules: - """Get the balance rules instance for the provided token version.""" - match token_version: - case TokenVersion.DEPOSIT: - return _DepositTokenRules( - settings, - token_uid, - token_version, - ) - case TokenVersion.FEE: - return _FeeTokenRules( - settings, - token_uid, - token_version, - ) - case TokenVersion.NATIVE: - raise AssertionError(f"NATIVE token version is not supported for token {token_uid.hex()}") - case _: - assert_never(token_version) - - -class _DepositTokenRules(TokenSyscallBalanceRules): - - def create_token( - self, - *, - token_uid: TokenUid, - token_symbol: str, - token_name: str, - amount: int, - fee_payment_token: TokenDescription - ) -> TokenSyscallBalance: - assert amount > 0 - self._validate_payment_token(fee_payment_token) - htr_amount = -get_deposit_token_deposit_amount(self._settings, amount) - - return TokenSyscallBalance( - type=TokenOperationType.CREATE, - token_version=TokenVersion.DEPOSIT, - token_name=token_name, - token_symbol=token_symbol, - token=TokenSyscallBalanceEntry(token_uid=self.token_uid, amount=amount), - fee_payment=TokenSyscallBalanceEntry(token_uid=TokenUid(fee_payment_token.token_id), amount=htr_amount) - ) - - def mint(self, amount: int, *, fee_payment_token: TokenDescription) -> TokenSyscallBalance: - assert amount > 0 - self._validate_payment_token(fee_payment_token) - htr_amount = -get_deposit_token_deposit_amount(self._settings, amount) - - return TokenSyscallBalance( - type=TokenOperationType.MINT, - token=TokenSyscallBalanceEntry(token_uid=self.token_uid, amount=amount), - fee_payment=TokenSyscallBalanceEntry(token_uid=TokenUid(fee_payment_token.token_id), amount=htr_amount) - ) - - def melt(self, amount: int, *, fee_payment_token: TokenDescription) -> TokenSyscallBalance: - assert amount > 0 - 
self._validate_payment_token(fee_payment_token) - htr_amount = +get_deposit_token_withdraw_amount(self._settings, amount) - - return TokenSyscallBalance( - type=TokenOperationType.MELT, - token=TokenSyscallBalanceEntry(token_uid=self.token_uid, amount=-amount), - fee_payment=TokenSyscallBalanceEntry(token_uid=TokenUid(fee_payment_token.token_id), amount=htr_amount) - ) - - def get_syscall_update_token_records(self, operation: TokenSyscallBalance) -> list['SyscallUpdateTokenRecord']: - match operation.type: - case TokenOperationType.MINT | TokenOperationType.CREATE: - assert operation.token.amount > 0 and operation.fee_payment.amount < 0 - case TokenOperationType.MELT: - assert operation.token.amount < 0 and operation.fee_payment.amount > 0 - case _: - assert_never(operation.type) - - return operation.to_syscall_records() - - def _validate_payment_token(self, token: TokenDescription) -> bool: - if token.token_id == TokenUid(HATHOR_TOKEN_UID): - return True - raise NCInvalidFeePaymentToken("Only HTR is allowed to be used with deposit based token syscalls") - - -class _FeeTokenRules(TokenSyscallBalanceRules): - - def _get_fee_amount(self, fee_payment_token: TokenUid) -> int: - # For fee tokens, we only need to pay the transaction fee, not deposit HTR - if fee_payment_token == TokenUid(HATHOR_TOKEN_UID): - fee_amount = -self._settings.FEE_PER_OUTPUT - else: - fee_amount = -int(self._settings.FEE_PER_OUTPUT / self._settings.TOKEN_DEPOSIT_PERCENTAGE) - - assert fee_amount < 0 - return fee_amount - - def create_token( - self, - *, - token_uid: TokenUid, - token_symbol: str, - token_name: str, - amount: int, - fee_payment_token: TokenDescription - ) -> TokenSyscallBalance: - assert amount > 0 - self._validate_payment_token(fee_payment_token) - # For fee tokens, we only need to pay the transaction fee, not deposit HTR - fee_amount = self._get_fee_amount(TokenUid(fee_payment_token.token_id)) - - return TokenSyscallBalance( - type=TokenOperationType.CREATE, - 
token_version=TokenVersion.FEE, - token_name=token_name, - token_symbol=token_symbol, - token=TokenSyscallBalanceEntry(token_uid=self.token_uid, amount=amount), - fee_payment=TokenSyscallBalanceEntry(token_uid=TokenUid(fee_payment_token.token_id), amount=fee_amount) - ) - - def mint(self, amount: int, *, fee_payment_token: TokenDescription) -> TokenSyscallBalance: - assert amount > 0 - self._validate_payment_token(fee_payment_token) - fee_amount = self._get_fee_amount(TokenUid(fee_payment_token.token_id)) - return TokenSyscallBalance( - type=TokenOperationType.MINT, - token=TokenSyscallBalanceEntry(token_uid=self.token_uid, amount=amount), - fee_payment=TokenSyscallBalanceEntry(token_uid=TokenUid(fee_payment_token.token_id), amount=fee_amount) - ) - - def melt(self, amount: int, *, fee_payment_token: TokenDescription) -> TokenSyscallBalance: - assert amount > 0 - self._validate_payment_token(fee_payment_token) - fee_amount = self._get_fee_amount(TokenUid(fee_payment_token.token_id)) - - return TokenSyscallBalance( - type=TokenOperationType.MELT, - token=TokenSyscallBalanceEntry(token_uid=self.token_uid, amount=-amount), - fee_payment=TokenSyscallBalanceEntry(token_uid=TokenUid(fee_payment_token.token_id), amount=fee_amount) - ) - - def get_syscall_update_token_records(self, operation: TokenSyscallBalance) -> list['SyscallUpdateTokenRecord']: - assert operation.fee_payment.amount < 0 - - match operation.type: - case TokenOperationType.MINT | TokenOperationType.CREATE: - assert operation.token.amount > 0 - case TokenOperationType.MELT: - assert operation.token.amount < 0 - case _: - assert_never(operation.type) - - return operation.to_syscall_records() - - def _validate_payment_token(self, token_info: TokenDescription) -> None: - match token_info.token_version: - case TokenVersion.FEE: - raise NCInvalidFeePaymentToken("fee-based tokens aren't allowed for paying fees") - case TokenVersion.DEPOSIT: - pass - case TokenVersion.NATIVE: - pass - case _: - 
assert_never(token_info.token_version) diff --git a/hathor/nanocontracts/token_fees.py b/hathor/nanocontracts/token_fees.py new file mode 100644 index 000000000..e565e74a1 --- /dev/null +++ b/hathor/nanocontracts/token_fees.py @@ -0,0 +1,86 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing_extensions import assert_never + +from hathor.conf.settings import HathorSettings +from hathor.nanocontracts.exception import NCInvalidFeePaymentToken +from hathor.transaction.token_info import TokenDescription, TokenVersion +from hathor.transaction.util import get_deposit_token_deposit_amount, get_deposit_token_withdraw_amount + + +def calculate_mint_fee( + *, + settings: HathorSettings, + token_version: TokenVersion, + amount: int, + fee_payment_token: TokenDescription, +) -> int: + """Calculate the fee for a mint operation.""" + match token_version: + case TokenVersion.NATIVE: + raise AssertionError + case TokenVersion.DEPOSIT: + _validate_deposit_based_token(fee_payment_token) + return -get_deposit_token_deposit_amount(settings, amount) + case TokenVersion.FEE: + _validate_fee_based_token(fee_payment_token) + return -_calculate_fee_token_fee(settings, fee_payment_token) + case _: # pragma: no cover + assert_never(token_version) + + +def calculate_melt_fee( + *, + settings: HathorSettings, + token_version: TokenVersion, + amount: int, + fee_payment_token: TokenDescription, +) -> int: + """Calculate the fee for a melt operation.""" + 
match token_version: + case TokenVersion.NATIVE: + raise AssertionError + case TokenVersion.DEPOSIT: + _validate_deposit_based_token(fee_payment_token) + return +get_deposit_token_withdraw_amount(settings, amount) + case TokenVersion.FEE: + _validate_fee_based_token(fee_payment_token) + return -_calculate_fee_token_fee(settings, fee_payment_token) + case _: # pragma: no cover + assert_never(token_version) + + +def _validate_fee_based_token(fee_payment_token: TokenDescription) -> None: + match fee_payment_token.token_version: + case TokenVersion.FEE: + raise NCInvalidFeePaymentToken("fee-based tokens aren't allowed for paying fees") + case TokenVersion.DEPOSIT | TokenVersion.NATIVE: + pass + case _: # pragma: no cover + assert_never(fee_payment_token.token_version) + + +def _validate_deposit_based_token(fee_payment_token: TokenDescription) -> None: + from hathor import HATHOR_TOKEN_UID + if fee_payment_token.token_id != HATHOR_TOKEN_UID: + raise NCInvalidFeePaymentToken('Only HTR is allowed to be used with deposit based token syscalls') + + +def _calculate_fee_token_fee(settings: HathorSettings, fee_payment_token: TokenDescription) -> int: + """Calculate the fee for handling a fee-based token.""" + from hathor import HATHOR_TOKEN_UID + if fee_payment_token.token_id == HATHOR_TOKEN_UID: + return settings.FEE_PER_OUTPUT + return int(settings.FEE_PER_OUTPUT / settings.TOKEN_DEPOSIT_PERCENTAGE) diff --git a/hathor/p2p/sync_v2/transaction_streaming_client.py b/hathor/p2p/sync_v2/transaction_streaming_client.py index 2cb1b22c1..e4a2f7925 100644 --- a/hathor/p2p/sync_v2/transaction_streaming_client.py +++ b/hathor/p2p/sync_v2/transaction_streaming_client.py @@ -47,11 +47,17 @@ def __init__(self, self.protocol = self.sync_agent.protocol self.tx_storage = self.sync_agent.tx_storage self.verification_service = self.protocol.node.verification_service - # XXX: since it's not straightforward to get the correct block, it's OK to just disable checkdatasig counting, + + # XXX: 
Since it's not straightforward to get the correct block, it's OK to just disable checkdatasig counting, # it will be correctly enabled when doing a full validation anyway. - self.verification_params = VerificationParams(enable_checkdatasig_count=False) - self.reactor = sync_agent.reactor + # We can also set the `nc_block_root_id` to `None` because we only call `verify_basic`, + # which doesn't need it. + self.verification_params = VerificationParams( + enable_checkdatasig_count=False, + nc_block_root_id=None, + ) + self.reactor = sync_agent.reactor self.log = logger.new(peer=self.protocol.get_short_peer_id()) # List of blocks from which we will receive transactions. diff --git a/hathor/transaction/base_transaction.py b/hathor/transaction/base_transaction.py index 831970b13..d359c9e62 100644 --- a/hathor/transaction/base_transaction.py +++ b/hathor/transaction/base_transaction.py @@ -261,7 +261,7 @@ def is_transaction(self) -> bool: raise NotImplementedError def is_nano_contract(self) -> bool: - """Return True if this transaction is a nano contract or not.""" + """Return whether this transaction is a nano contract.""" return False def has_fees(self) -> bool: diff --git a/hathor/transaction/exceptions.py b/hathor/transaction/exceptions.py index 15eb8422d..704dc7fe0 100644 --- a/hathor/transaction/exceptions.py +++ b/hathor/transaction/exceptions.py @@ -274,3 +274,7 @@ class FeeHeaderTokenNotFound(InvalidFeeHeader): class InvalidFeeAmount(InvalidFeeHeader): """Invalid fee amount""" + + +class TokenNotFound(TxValidationError): + """Token not found.""" diff --git a/hathor/transaction/resources/create_tx.py b/hathor/transaction/resources/create_tx.py index eda48c0ff..2d62cf3d3 100644 --- a/hathor/transaction/resources/create_tx.py +++ b/hathor/transaction/resources/create_tx.py @@ -119,7 +119,10 @@ def _verify_unsigned_skip_pow(self, tx: Transaction) -> None: # need to run verify_inputs first to check if all inputs exist verifiers.tx.verify_inputs(tx, skip_script=True) 
verifiers.vertex.verify_parents(tx) - verifiers.tx.verify_sum(tx.get_complete_token_info()) + + best_block = self.manager.tx_storage.get_best_block() + block_storage = self.manager.get_nc_block_storage(best_block) + verifiers.tx.verify_sum(self.manager._settings, tx.get_complete_token_info(block_storage)) CreateTxResource.openapi = { diff --git a/hathor/transaction/storage/migrations/nc_storage_compat1.py b/hathor/transaction/storage/migrations/nc_storage_compat2.py similarity index 97% rename from hathor/transaction/storage/migrations/nc_storage_compat1.py rename to hathor/transaction/storage/migrations/nc_storage_compat2.py index 92196cece..dc4d1e3aa 100644 --- a/hathor/transaction/storage/migrations/nc_storage_compat1.py +++ b/hathor/transaction/storage/migrations/nc_storage_compat2.py @@ -29,7 +29,7 @@ def skip_empty_db(self) -> bool: return True def get_db_name(self) -> str: - return 'nc_storage_compat1' + return 'nc_storage_compat2' def run(self, storage: 'TransactionStorage') -> None: raise Exception('Cannot migrate your database due to an incompatible change in the nanocontracts storage. 
' diff --git a/hathor/transaction/storage/transaction_storage.py b/hathor/transaction/storage/transaction_storage.py index e33c8755d..373cf5eff 100644 --- a/hathor/transaction/storage/transaction_storage.py +++ b/hathor/transaction/storage/transaction_storage.py @@ -44,7 +44,7 @@ add_closest_ancestor_block, change_score_acc_weight_metadata, include_funds_for_first_block, - nc_storage_compat1, + nc_storage_compat2, ) from hathor.transaction.storage.tx_allow_scope import TxAllowScope, tx_allow_context from hathor.transaction.transaction import Transaction @@ -104,7 +104,7 @@ class TransactionStorage(ABC): change_score_acc_weight_metadata.Migration, add_closest_ancestor_block.Migration, include_funds_for_first_block.Migration, - nc_storage_compat1.Migration, + nc_storage_compat2.Migration, ] _migrations: list[BaseMigration] diff --git a/hathor/transaction/token_creation_tx.py b/hathor/transaction/token_creation_tx.py index ac1c85026..ad165e093 100644 --- a/hathor/transaction/token_creation_tx.py +++ b/hathor/transaction/token_creation_tx.py @@ -18,6 +18,7 @@ from typing_extensions import override from hathor.conf.settings import HathorSettings +from hathor.nanocontracts.storage import NCBlockStorage from hathor.transaction.base_transaction import TxInput, TxOutput, TxVersion from hathor.transaction.storage import TransactionStorage # noqa: F401 from hathor.transaction.token_info import TokenInfo, TokenInfoDict, TokenVersion @@ -250,10 +251,10 @@ def to_json_extended(self) -> dict[str, Any]: return json @override - def _get_token_info_from_inputs(self) -> TokenInfoDict: - token_dict = super()._get_token_info_from_inputs() + def _get_token_info_from_inputs(self, nc_block_storage: NCBlockStorage) -> TokenInfoDict: + token_dict = super()._get_token_info_from_inputs(nc_block_storage) # we add the created token's info to token_dict, as the creation tx allows for mint/melt - token_dict[self.hash] = TokenInfo.get_default(version=self.token_version, can_mint=True, 
can_melt=True) + token_dict[self.hash] = TokenInfo(version=self.token_version, can_mint=True, can_melt=True) return token_dict diff --git a/hathor/transaction/token_info.py b/hathor/transaction/token_info.py index 6970863df..c713133d1 100644 --- a/hathor/transaction/token_info.py +++ b/hathor/transaction/token_info.py @@ -14,12 +14,14 @@ from dataclasses import dataclass from enum import IntEnum -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Any from hathor.types import TokenUid if TYPE_CHECKING: from hathor.conf.settings import HathorSettings + from hathor.nanocontracts.storage import NCBlockStorage + from hathor.transaction.storage import TransactionStorage class TokenVersion(IntEnum): @@ -31,31 +33,15 @@ class TokenVersion(IntEnum): # used when (de)serializing token information @dataclass(slots=True, kw_only=True) class TokenInfo: - amount: int - can_mint: bool - can_melt: bool - version: TokenVersion + version: TokenVersion | None + amount: int = 0 + can_mint: bool = False + can_melt: bool = False # count of non-authority outputs that is used to calculate the fee chargeable_outputs: int = 0 # count of non-authority inputs that is used to calculate the fee chargeable_inputs: int = 0 - @classmethod - def get_default(cls, - version: TokenVersion = TokenVersion.NATIVE, - can_mint: bool = False, - can_melt: bool = False) -> 'TokenInfo': - """ - Create default deposit token info with zero amount and optional mint/melt permissions. - """ - - return TokenInfo( - amount=0, - can_mint=can_mint, - can_melt=can_melt, - version=version, - ) - def has_been_melted(self) -> bool: """ Check if this token has been melted. 
@@ -78,8 +64,17 @@ class TokenDescription: token_symbol: str token_version: TokenVersion + def __post_init__(self) -> None: + assert isinstance(self.token_version, TokenVersion) + class TokenInfoDict(dict[TokenUid, TokenInfo]): + __slots__ = ('fees_from_fee_header',) + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + self.fees_from_fee_header: int = 0 + def calculate_fee(self, settings: 'HathorSettings') -> int: """ Calculate the total fee based on the number of chargeable @@ -109,3 +104,26 @@ def calculate_fee(self, settings: 'HathorSettings') -> int: if token_info.chargeable_inputs > 0: fee += settings.FEE_PER_OUTPUT return fee + + +def get_token_version( + tx_storage: 'TransactionStorage', + nc_block_storage: 'NCBlockStorage', + token_uid: TokenUid +) -> TokenVersion | None: + """ + Get the token version for a given token uid. + It searches first in the tx storage and then in the block storage. + """ + from hathor.conf.settings import HATHOR_TOKEN_UID + if token_uid == HATHOR_TOKEN_UID: + return TokenVersion.NATIVE + from hathor.transaction.storage.exceptions import TransactionDoesNotExist + try: + token_creation_tx = tx_storage.get_token_creation_transaction(token_uid) + return token_creation_tx.token_version + except TransactionDoesNotExist: + from hathor.nanocontracts.types import TokenUid + if nc_block_storage.has_token(TokenUid(token_uid)): + return nc_block_storage.get_token_description(TokenUid(token_uid)).token_version + return None diff --git a/hathor/transaction/transaction.py b/hathor/transaction/transaction.py index c61ee153c..b233b4891 100644 --- a/hathor/transaction/transaction.py +++ b/hathor/transaction/transaction.py @@ -29,7 +29,7 @@ from hathor.transaction.headers import NanoHeader, VertexBaseHeader from hathor.transaction.headers.fee_header import FeeHeader from hathor.transaction.static_metadata import TransactionStaticMetadata -from hathor.transaction.token_info import TokenInfo, TokenInfoDict, 
TokenVersion +from hathor.transaction.token_info import TokenInfo, TokenInfoDict, TokenVersion, get_token_version from hathor.transaction.util import VerboseCallback, unpack, unpack_len from hathor.types import TokenUid, VertexId @@ -37,6 +37,7 @@ if TYPE_CHECKING: from hathor.conf.settings import HathorSettings + from hathor.nanocontracts.storage import NCBlockStorage from hathor.transaction.storage import TransactionStorage # noqa: F401 # Signal bits (B), version (B), token uids len (B) and inputs len (B), outputs len (B). @@ -330,14 +331,17 @@ def verify_checkpoint(self, checkpoints: list[Checkpoint]) -> None: raise InvalidNewTransaction(f'Invalid new transaction {self.hash_hex}: expected to reach a checkpoint but ' 'none of its children is checkpoint-valid') - def get_complete_token_info(self) -> TokenInfoDict: + def get_complete_token_info(self, nc_block_storage: NCBlockStorage) -> TokenInfoDict: """ Get a complete token info dict, including data from both inputs and outputs. + It uses a block storage with the latest token changes in nano contracts """ - token_dict = self._get_token_info_from_inputs() - self._update_token_info_from_nano_actions(token_dict=token_dict) - # This one must be called last so token_dict already contains all tokens in inputs and nano actions. + + token_dict = self._get_token_info_from_inputs(nc_block_storage) + self._update_token_info_from_nano_actions(token_dict=token_dict, nc_block_storage=nc_block_storage) + # These must be called last so token_dict already contains all tokens in inputs and nano actions. 
self._update_token_info_from_outputs(token_dict=token_dict) + self._update_token_info_from_fees(token_dict=token_dict) return token_dict @@ -348,48 +352,71 @@ def get_minimum_number_of_inputs(self) -> int: return 0 return 1 - def _update_token_info_from_nano_actions(self, *, token_dict: TokenInfoDict) -> None: + def _update_token_info_from_nano_actions( + self, + *, + token_dict: TokenInfoDict, + nc_block_storage: NCBlockStorage, + ) -> None: """Update token_dict with nano actions.""" if not self.is_nano_contract(): return + assert self.storage is not None from hathor.nanocontracts.balance_rules import BalanceRules nano_header = self.get_nano_header() for action in nano_header.get_actions(): rules = BalanceRules.get_rules(self._settings, action) + if action.token_uid not in token_dict: + # we try to load this token version from storage in case it's not in the inputs + token_dict[action.token_uid] = TokenInfo( + version=get_token_version(self.storage, nc_block_storage, action.token_uid) + ) rules.verification_rule(token_dict) - def _get_token_info_from_inputs(self) -> TokenInfoDict: + def _update_token_info_from_fees(self, *, token_dict: TokenInfoDict) -> None: + """Update token_dict with fees from fee header""" + + if not self.has_fees(): + return + + fee_header = self.get_fee_header() + fees = fee_header.get_fees() + # we store the total fee amount from the header to be used in the verify_sum + token_dict.fees_from_fee_header = fee_header.total_fee_amount() + for fee in fees: + token_info = token_dict.get(fee.token_uid) + if token_info is None: + raise InvalidToken('no inputs/actions for token {}'.format(fee.token_uid.hex())) + + # it should be defined in the inputs/actions + if token_info.version not in (None, TokenVersion.NATIVE, TokenVersion.DEPOSIT): + raise InvalidToken('token {} cannot be used to pay fees'.format(fee.token_uid.hex())) + + # act as a regular output subtracting from the total amount (which is done with sum in this context) + token_info.amount 
+= fee.amount + token_dict[fee.token_uid] = token_info + + def _get_token_info_from_inputs(self, nc_block_storage: NCBlockStorage) -> TokenInfoDict: """Sum up all tokens present in the inputs and their properties (amount, can_mint, can_melt) """ + assert self.storage is not None token_dict = TokenInfoDict() # add HTR to token dict due to tx melting tokens: there might be an HTR output without any # input or authority. If we don't add it, an error will be raised when iterating through # the outputs of such tx (error: 'no token creation and no inputs for token 00') - token_dict[self._settings.HATHOR_TOKEN_UID] = TokenInfo.get_default() + token_dict[self._settings.HATHOR_TOKEN_UID] = TokenInfo(version=TokenVersion.NATIVE) for tx_input in self.inputs: spent_tx = self.get_spent_tx(tx_input) spent_output = spent_tx.outputs[tx_input.index] token_uid = spent_tx.get_token_uid(spent_output.get_token_index()) - token_version = TokenVersion.NATIVE - - if token_uid != self._settings.HATHOR_TOKEN_UID: - from hathor.transaction.storage.exceptions import TransactionDoesNotExist - assert self.storage is not None - try: - token_creation_tx = self.storage.get_token_creation_transaction(token_uid) - except TransactionDoesNotExist: - raise InvalidToken(f"Token UID {token_uid!r} does not match any token creation transaction") - token_version = token_creation_tx.token_version - - token_info = token_dict.get( - token_uid, - TokenInfo.get_default(version=token_version) - ) + token_version = get_token_version(self.storage, nc_block_storage, token_uid) + + token_info = token_dict.get(token_uid, TokenInfo(version=token_version)) if spent_output.is_token_authority(): token_info.can_mint = token_info.can_mint or spent_output.can_mint_token() diff --git a/hathor/transaction/types.py b/hathor/transaction/types.py index 34a59e9b1..1920984cc 100644 --- a/hathor/transaction/types.py +++ b/hathor/transaction/types.py @@ -18,7 +18,8 @@ from typing import TYPE_CHECKING, Any, Self if TYPE_CHECKING: - 
from hathor.nanocontracts.runner.types import CallRecord, NCIndexUpdateRecord + from hathor.nanocontracts.runner.call_info import CallRecord + from hathor.nanocontracts.runner.index_records import NCIndexUpdateRecord, nc_index_update_record_to_json @dataclass(slots=True, frozen=True, kw_only=True) @@ -35,13 +36,13 @@ def to_json(self) -> dict[str, Any]: blueprint_id=self.blueprint_id.hex(), contract_id=self.contract_id.hex(), method_name=self.method_name, - index_updates=[syscall.to_json() for syscall in self.index_updates] + index_updates=[nc_index_update_record_to_json(record) for record in self.index_updates] ) @classmethod def from_json(cls, json_dict: dict[str, Any]) -> Self: """Create an instance from a json dict.""" - from hathor.nanocontracts.runner.types import nc_index_update_record_from_json + from hathor.nanocontracts.runner.index_records import nc_index_update_record_from_json return cls( blueprint_id=bytes.fromhex(json_dict['blueprint_id']), contract_id=bytes.fromhex(json_dict['contract_id']), diff --git a/hathor/verification/transaction_verifier.py b/hathor/verification/transaction_verifier.py index 0242e9179..dec5cad12 100644 --- a/hathor/verification/transaction_verifier.py +++ b/hathor/verification/transaction_verifier.py @@ -14,7 +14,6 @@ from __future__ import annotations -from dataclasses import dataclass from typing import TYPE_CHECKING, assert_never from hathor.daa import DifficultyAdjustmentAlgorithm @@ -40,6 +39,7 @@ RewardLocked, ScriptError, TimestampError, + TokenNotFound, TooFewInputs, TooManyBetweenConflicts, TooManyInputs, @@ -236,10 +236,19 @@ def verify_output_token_indexes(self, tx: Transaction) -> None: if output.get_token_index() > len(tx.tokens): raise InvalidToken('token uid index not available: index {}'.format(output.get_token_index())) - def verify_sum(self, token_dict: TokenInfoDict) -> None: + @classmethod + def verify_sum( + cls, + settings: HathorSettings, + token_dict: TokenInfoDict, + allow_nonexistent_tokens: bool = 
False, + ) -> None: """Verify that the sum of outputs is equal of the sum of inputs, for each token. If sum of inputs and outputs is not 0, make sure inputs have mint/melt authority. + When `allow_nonexistent_tokens` flag is set to `True` and a nonexistent token is provided, + this method will skip the fee and HTR balance checks. + token_dict sums up all tokens present in the tx and their properties (amount, can_mint, can_melt) amount = outputs - inputs, thus: - amount < 0 when melting @@ -249,74 +258,74 @@ def verify_sum(self, token_dict: TokenInfoDict) -> None: """ deposit = 0 withdraw = 0 - withdraw_without_authority = 0 - fee = token_dict.calculate_fee(self._settings) + has_nonexistent_tokens = False for token_uid, token_info in token_dict.items(): + cls._check_token_permissions(token_uid, token_info) match token_info.version: + case None: + # when a token is not found, we can't assert the HTR value, since we don't know its version + if not allow_nonexistent_tokens: + raise TokenNotFound(f'token uid {token_uid.hex()} not found.') + has_nonexistent_tokens = True + case TokenVersion.NATIVE: continue + case TokenVersion.DEPOSIT: - result = self._verify_deposit_token(fee, token_uid, token_info) - deposit += result.deposit - withdraw += result.withdraw - withdraw_without_authority += result.withdraw_without_authority + if token_info.has_been_melted(): + withdraw += get_deposit_token_withdraw_amount(settings, token_info.amount) + if token_info.has_been_minted(): + deposit += get_deposit_token_deposit_amount(settings, token_info.amount) + case TokenVersion.FEE: - self._verify_fee_token(token_uid, token_info) - case _: - assert_never(token_info) + continue - is_melting_without_authority = withdraw_without_authority - fee > 0 - if is_melting_without_authority: - raise ForbiddenMelt('Melting tokens without a melt authority is forbidden') + case _: + assert_never(token_info.version) # check whether the deposit/withdraw amount is correct - htr_expected_amount = withdraw + 
withdraw_without_authority - deposit - fee - htr_info = token_dict[self._settings.HATHOR_TOKEN_UID] - if htr_info.amount != htr_expected_amount: + htr_expected_amount = withdraw - deposit + htr_info = token_dict[settings.HATHOR_TOKEN_UID] + if htr_info.amount < htr_expected_amount: raise InputOutputMismatch('HTR balance is different than expected. (amount={}, expected={})'.format( htr_info.amount, htr_expected_amount, )) - def _verify_fee_token(self, token_uid: TokenUid, token_info: TokenInfo) -> None: - """Verify fee token can be minted/melted based on its authority.""" + # in a partial validation, it's not possible to check fees and + # htr amount since it depends on verification with all token versions + if has_nonexistent_tokens: + return + + expected_fee = token_dict.calculate_fee(settings) + if expected_fee != token_dict.fees_from_fee_header: + raise InputOutputMismatch(f"Fee amount is different than expected. " + f"(amount={token_dict.fees_from_fee_header}, expected={expected_fee})") + + if htr_info.amount > htr_expected_amount: + raise InputOutputMismatch('HTR balance is different than expected. 
(amount={}, expected={})'.format( + htr_info.amount, + htr_expected_amount, + )) + + assert htr_info.amount == htr_expected_amount + + @staticmethod + def _check_token_permissions(token_uid: TokenUid, token_info: TokenInfo) -> None: + """Verify whether token can be minted/melted based on its authority.""" + from hathor.conf.settings import HATHOR_TOKEN_UID + if token_info.version == TokenVersion.NATIVE: + assert token_uid == HATHOR_TOKEN_UID + assert not token_info.can_mint + assert not token_info.can_melt + return + assert token_uid != HATHOR_TOKEN_UID if token_info.has_been_melted() and not token_info.can_melt: raise ForbiddenMelt.from_token(token_info.amount, token_uid) if token_info.has_been_minted() and not token_info.can_mint: raise ForbiddenMint(token_info.amount, token_uid) - def _verify_deposit_token(self, fee: int, token_uid: TokenUid, token_info: TokenInfo) -> DepositTokenVerifyResult: - """Verify deposit token operations and calculate withdrawal/deposit amounts.""" - result = DepositTokenVerifyResult() - if token_info.has_been_melted(): - withdraw_amount = get_deposit_token_withdraw_amount(self._settings, token_info.amount) - if token_info.can_melt: - result.withdraw += withdraw_amount - else: - # Any melting operation without authority is forbidden. - # It includes trying to pay fee with non-integer amounts. 
- # For example (DBT - Deposit based token) - # 1.99 DBT results in 0.01 HTR and (0.99 DBT melted) => this one is forbidden - if fee == 0: - raise ForbiddenMelt.from_token(token_info.amount, token_uid) - is_integer_amount = ( - token_info.amount * self._settings.TOKEN_DEPOSIT_PERCENTAGE).is_integer() - if not is_integer_amount: - raise ForbiddenMelt( - "Paying fees with non integer amount is forbidden" - ) - - result.withdraw_without_authority += withdraw_amount - - if token_info.has_been_minted(): - if not token_info.can_mint: - raise ForbiddenMint(token_info.amount, token_uid) - - result.deposit += get_deposit_token_deposit_amount(self._settings, token_info.amount) - - return result - def verify_version(self, tx: Transaction, params: VerificationParams) -> None: """Verify that the vertex version is valid.""" allowed_tx_versions = { @@ -391,10 +400,3 @@ def verify_conflict(self, tx: Transaction, params: VerificationParams) -> None: if between_counter > MAX_BETWEEN_CONFLICTS: raise TooManyBetweenConflicts - - -@dataclass(kw_only=True, slots=True) -class DepositTokenVerifyResult: - deposit: int = 0 - withdraw_without_authority: int = 0 - withdraw: int = 0 diff --git a/hathor/verification/verification_params.py b/hathor/verification/verification_params.py index f069527a5..7c24a06f2 100644 --- a/hathor/verification/verification_params.py +++ b/hathor/verification/verification_params.py @@ -16,11 +16,14 @@ from dataclasses import dataclass +from hathor.transaction import Block + @dataclass(slots=True, frozen=True, kw_only=True) class VerificationParams: """Contains every parameter/setting to run a single verification.""" + nc_block_root_id: bytes | None enable_checkdatasig_count: bool reject_locked_reward: bool = True skip_block_weight_verification: bool = False @@ -32,12 +35,21 @@ class VerificationParams: reject_conflicts_with_confirmed_txs: bool = False @classmethod - def default_for_mempool(cls, *, enable_nano: bool = False) -> VerificationParams: + def 
default_for_mempool( + cls, + *, + best_block: Block, + enable_nano: bool = False, + ) -> VerificationParams: """This is the appropriate parameters for verifying mempool transactions, realtime blocks and API pushes. Other cases should instantiate `VerificationParams` manually with the appropriate parameter values. """ + best_block_meta = best_block.get_metadata() + if best_block_meta.nc_block_root_id is None: + assert best_block.is_genesis return cls( + nc_block_root_id=best_block_meta.nc_block_root_id, enable_checkdatasig_count=True, enable_nano=enable_nano, reject_too_old_vertices=True, diff --git a/hathor/verification/verification_service.py b/hathor/verification/verification_service.py index 6d1c66d11..5d4f196da 100644 --- a/hathor/verification/verification_service.py +++ b/hathor/verification/verification_service.py @@ -15,7 +15,8 @@ from typing_extensions import assert_never from hathor.conf.settings import HathorSettings -from hathor.nanocontracts import OnChainBlueprint +from hathor.nanocontracts import NCStorageFactory, OnChainBlueprint +from hathor.nanocontracts.storage import NCBlockStorage from hathor.profiler import get_cpu_profiler from hathor.transaction import BaseTransaction, Block, MergeMinedBlock, Transaction, TxVersion from hathor.transaction.poa import PoaBlock @@ -23,6 +24,7 @@ from hathor.transaction.token_creation_tx import TokenCreationTransaction from hathor.transaction.token_info import TokenInfoDict from hathor.transaction.validation_state import ValidationState +from hathor.verification.fee_header_verifier import FeeHeaderVerifier from hathor.verification.verification_params import VerificationParams from hathor.verification.vertex_verifiers import VertexVerifiers @@ -30,7 +32,7 @@ class VerificationService: - __slots__ = ('_settings', 'verifiers', '_tx_storage') + __slots__ = ('_settings', 'verifiers', '_tx_storage', '_nc_storage_factory') def __init__( self, @@ -38,10 +40,12 @@ def __init__( settings: HathorSettings, verifiers: 
VertexVerifiers, tx_storage: TransactionStorage | None = None, + nc_storage_factory: NCStorageFactory | None = None, ) -> None: self._settings = settings self.verifiers = verifiers self._tx_storage = tx_storage + self._nc_storage_factory = nc_storage_factory def validate_basic(self, vertex: BaseTransaction, params: VerificationParams) -> bool: """ Run basic validations (all that are possible without dependencies) and update the validation state. @@ -258,8 +262,15 @@ def _verify_tx( self.verify_without_storage(tx, params) self.verifiers.tx.verify_sigops_input(tx, params.enable_checkdatasig_count) self.verifiers.tx.verify_inputs(tx) # need to run verify_inputs first to check if all inputs exist - self.verifiers.tx.verify_sum(token_dict or tx.get_complete_token_info()) self.verifiers.tx.verify_version(tx, params) + + block_storage = self._get_block_storage(params) + self.verifiers.tx.verify_sum( + self._settings, + token_dict or tx.get_complete_token_info(block_storage), + # if this tx isn't a nano contract we assume we can find all the tokens to validate this tx + allow_nonexistent_tokens=tx.is_nano_contract() + ) self.verifiers.vertex.verify_parents(tx) self.verifiers.tx.verify_conflict(tx, params) if params.reject_locked_reward: @@ -272,7 +283,7 @@ def _verify_token_creation_tx(self, tx: TokenCreationTransaction, params: Verifi """ # we should validate the token info before verifying the tx self.verifiers.token_creation_tx.verify_token_info(tx, params) - token_dict = tx.get_complete_token_info() + token_dict = tx.get_complete_token_info(self._get_block_storage(params)) self._verify_tx(tx, params, token_dict=token_dict) self.verifiers.token_creation_tx.verify_minted_tokens(tx, token_dict) @@ -280,6 +291,9 @@ def verify_without_storage(self, vertex: BaseTransaction, params: VerificationPa if vertex.hash in self._settings.SKIP_VERIFICATION: return + if vertex.has_fees(): + self._verify_without_storage_fee_header(vertex) + # We assert with type() instead of 
isinstance() because each subclass has a specific branch. match vertex.version: case TxVersion.REGULAR_BLOCK: @@ -305,7 +319,7 @@ def verify_without_storage(self, vertex: BaseTransaction, params: VerificationPa if vertex.is_nano_contract(): assert self._settings.ENABLE_NANO_CONTRACTS - self._verify_without_storage_nano_header(vertex, params) + self._verify_without_storage_nano_header(vertex) def _verify_without_storage_base_block(self, block: Block, params: VerificationParams) -> None: self.verifiers.block.verify_no_inputs(block) @@ -344,11 +358,16 @@ def _verify_without_storage_token_creation_tx( ) -> None: self._verify_without_storage_tx(tx, params) - def _verify_without_storage_nano_header(self, tx: BaseTransaction, params: VerificationParams) -> None: + def _verify_without_storage_nano_header(self, tx: BaseTransaction) -> None: assert tx.is_nano_contract() self.verifiers.nano_header.verify_nc_signature(tx) self.verifiers.nano_header.verify_actions(tx) + def _verify_without_storage_fee_header(self, tx: BaseTransaction) -> None: + assert tx.has_fees() + assert isinstance(tx, Transaction) + FeeHeaderVerifier.verify_fee_list(tx.get_fee_header(), tx) + def _verify_without_storage_on_chain_blueprint( self, tx: OnChainBlueprint, @@ -358,3 +377,9 @@ def _verify_without_storage_on_chain_blueprint( self.verifiers.on_chain_blueprint.verify_pubkey_is_allowed(tx) self.verifiers.on_chain_blueprint.verify_nc_signature(tx) self.verifiers.on_chain_blueprint.verify_code(tx) + + def _get_block_storage(self, params: VerificationParams) -> NCBlockStorage: + assert self._nc_storage_factory is not None + if params.nc_block_root_id is None: + return self._nc_storage_factory.get_empty_block_storage() + return self._nc_storage_factory.get_block_storage(params.nc_block_root_id) diff --git a/hathor/vertex_handler/vertex_handler.py b/hathor/vertex_handler/vertex_handler.py index 46a9283da..7ccc55325 100644 --- a/hathor/vertex_handler/vertex_handler.py +++ 
b/hathor/vertex_handler/vertex_handler.py @@ -88,6 +88,7 @@ def on_new_block(self, block: Block, *, deps: list[Transaction]) -> Generator[An """Called by block sync.""" parent_block_hash = block.get_block_parent_hash() parent_block = self._tx_storage.get_block(parent_block_hash) + parent_meta = parent_block.get_metadata() enable_checkdatasig_count = self._feature_service.is_feature_active( vertex=parent_block, @@ -98,7 +99,15 @@ def on_new_block(self, block: Block, *, deps: list[Transaction]) -> Generator[An settings=self._settings, block=parent_block, feature_service=self._feature_service ) - params = VerificationParams(enable_checkdatasig_count=enable_checkdatasig_count, enable_nano=enable_nano) + if parent_meta.nc_block_root_id is None: + # This case only happens for the genesis and during sync of a voided chain. + assert parent_block.is_genesis or parent_meta.voided_by + + params = VerificationParams( + enable_checkdatasig_count=enable_checkdatasig_count, + enable_nano=enable_nano, + nc_block_root_id=parent_meta.nc_block_root_id, + ) for tx in deps: if not self._tx_storage.transaction_exists(tx.hash): @@ -117,7 +126,10 @@ def on_new_mempool_transaction(self, tx: Transaction) -> bool: """Called by mempool sync.""" best_block = self._tx_storage.get_best_block() enable_nano = is_nano_active(settings=self._settings, block=best_block, feature_service=self._feature_service) - params = VerificationParams.default_for_mempool(enable_nano=enable_nano) + params = VerificationParams.default_for_mempool( + enable_nano=enable_nano, + best_block=best_block, + ) return self._old_on_new_vertex(tx, params) @cpu.profiler('on_new_relayed_vertex') @@ -130,10 +142,16 @@ def on_new_relayed_vertex( ) -> bool: """Called for unsolicited vertex received, usually due to real time relay.""" best_block = self._tx_storage.get_best_block() + best_block_meta = best_block.get_metadata() enable_nano = is_nano_active(settings=self._settings, block=best_block, 
feature_service=self._feature_service) + if best_block_meta.nc_block_root_id is None: + assert best_block.is_genesis # XXX: checkdatasig enabled for relayed vertices params = VerificationParams( - enable_checkdatasig_count=True, reject_locked_reward=reject_locked_reward, enable_nano=enable_nano + enable_checkdatasig_count=True, + reject_locked_reward=reject_locked_reward, + enable_nano=enable_nano, + nc_block_root_id=best_block_meta.nc_block_root_id, ) return self._old_on_new_vertex(vertex, params, quiet=quiet) diff --git a/hathor/wallet/resources/send_tokens.py b/hathor/wallet/resources/send_tokens.py index b78e44871..4ba1afa06 100644 --- a/hathor/wallet/resources/send_tokens.py +++ b/hathor/wallet/resources/send_tokens.py @@ -43,7 +43,6 @@ def __init__(self, manager: HathorManager, settings: HathorSettings) -> None: # Important to have the manager so we can know the tx_storage self.manager = manager self._settings = settings - self.params = VerificationParams.default_for_mempool() def render_POST(self, request): """ POST request for /wallet/send_tokens/ @@ -134,7 +133,9 @@ def _render_POST_thread(self, values: dict[str, Any], request: Request) -> Union tx.weight = weight self.manager.cpu_mining_service.resolve(tx) tx.init_static_metadata_from_storage(self._settings, self.manager.tx_storage) - self.manager.verification_service.verify(tx, self.params) + best_block = self.manager.tx_storage.get_best_block() + params = VerificationParams.default_for_mempool(best_block=best_block) + self.manager.verification_service.verify(tx, params) return tx def _cb_tx_resolve(self, tx, request): diff --git a/hathor/wallet/resources/thin_wallet/send_tokens.py b/hathor/wallet/resources/thin_wallet/send_tokens.py index cd4076cca..88ca7a425 100644 --- a/hathor/wallet/resources/thin_wallet/send_tokens.py +++ b/hathor/wallet/resources/thin_wallet/send_tokens.py @@ -61,7 +61,6 @@ def __init__(self, manager): self.sleep_seconds = 0 self.log = logger.new() self.reactor = 
get_global_reactor() - self.params = VerificationParams.default_for_mempool() def render_POST(self, request: Request) -> Any: """ POST request for /thin_wallet/send_tokens/ @@ -216,7 +215,9 @@ def _stratum_deferred_resolve(self, context: _Context) -> None: def _stratum_thread_verify(self, context: _Context) -> _Context: """ Method to verify the transaction that runs in a separated thread """ - self.manager.verification_service.verify(context.tx, self.params) + best_block = self.manager.tx_storage.get_best_block() + params = VerificationParams.default_for_mempool(best_block=best_block) + self.manager.verification_service.verify(context.tx, params) return context def _stratum_timeout(self, result: Failure, timeout: int, *, context: _Context) -> None: @@ -273,7 +274,9 @@ def _should_stop(): raise CancelledError() context.tx.update_hash() context.tx.init_static_metadata_from_storage(self._settings, self.manager.tx_storage) - self.manager.verification_service.verify(context.tx, self.params) + best_block = self.manager.tx_storage.get_best_block() + params = VerificationParams.default_for_mempool(best_block=best_block) + self.manager.verification_service.verify(context.tx, params) return context def _cb_tx_resolve(self, context: _Context) -> None: diff --git a/tests/feature_activation/test_feature_simulation.py b/tests/feature_activation/test_feature_simulation.py index 17899666f..6aaf0cdb0 100644 --- a/tests/feature_activation/test_feature_simulation.py +++ b/tests/feature_activation/test_feature_simulation.py @@ -299,7 +299,7 @@ def test_feature(self) -> None: non_signaling_block.init_static_metadata_from_storage(settings, manager.tx_storage) with pytest.raises(BlockMustSignalError): - manager.verification_service.verify(non_signaling_block, self.verification_params) + manager.verification_service.verify(non_signaling_block, self.get_verification_params(manager)) with pytest.raises(InvalidNewTransaction): manager.propagate_tx(non_signaling_block) diff --git 
a/tests/nanocontracts/blueprints/unittest.py b/tests/nanocontracts/blueprints/unittest.py index ab17f3b79..7563cdd0b 100644 --- a/tests/nanocontracts/blueprints/unittest.py +++ b/tests/nanocontracts/blueprints/unittest.py @@ -1,10 +1,9 @@ from io import TextIOWrapper from typing import Sequence -from hathor.conf.settings import HATHOR_TOKEN_UID from hathor.crypto.util import decode_address from hathor.manager import HathorManager -from hathor.nanocontracts import Context +from hathor.nanocontracts import HATHOR_TOKEN_UID, Context from hathor.nanocontracts.blueprint import Blueprint from hathor.nanocontracts.blueprint_env import BlueprintEnvironment from hathor.nanocontracts.nc_exec_logs import NCLogConfig diff --git a/tests/nanocontracts/test_actions.py b/tests/nanocontracts/test_actions.py index 3ba97590e..f74ccb5c1 100644 --- a/tests/nanocontracts/test_actions.py +++ b/tests/nanocontracts/test_actions.py @@ -18,9 +18,8 @@ import pytest -from hathor.conf.settings import HATHOR_TOKEN_UID from hathor.indexes.tokens_index import TokensIndex -from hathor.nanocontracts import NC_EXECUTION_FAIL_ID, Blueprint, Context, public +from hathor.nanocontracts import HATHOR_TOKEN_UID, NC_EXECUTION_FAIL_ID, Blueprint, Context, public from hathor.nanocontracts.catalog import NCBlueprintCatalog from hathor.nanocontracts.exception import NCInvalidAction from hathor.nanocontracts.nc_exec_logs import NCLogConfig @@ -77,10 +76,10 @@ class TestActions(unittest.TestCase): def setUp(self) -> None: super().setUp() - self.verification_params = VerificationParams.default_for_mempool(enable_nano=True) self.bp_id = b'1' * 32 self.manager = self.create_peer('unittests', nc_log_config=NCLogConfig.FAILED, wallet_index=True) + self.manager.tx_storage.nc_catalog = NCBlueprintCatalog({ self.bp_id: MyBlueprint }) @@ -118,6 +117,11 @@ def setUp(self) -> None: ['tx0', 'tx1', 'tx2', 'TKA'], Transaction, ) + best_block = self.manager.tx_storage.get_best_block() + self.verification_params = 
VerificationParams.default_for_mempool( + enable_nano=True, + best_block=best_block, + ) # We finish a manual setup of tx1, so it can be used directly in verification methods. self.tx1.storage = self.manager.tx_storage diff --git a/tests/nanocontracts/test_actions_fee.py b/tests/nanocontracts/test_actions_fee.py index d4390dcf7..5d6d1fd09 100644 --- a/tests/nanocontracts/test_actions_fee.py +++ b/tests/nanocontracts/test_actions_fee.py @@ -7,7 +7,10 @@ from hathor.nanocontracts.storage.contract_storage import Balance, BalanceKey from hathor.nanocontracts.types import ContractId, NCDepositAction, NCFee, NCWithdrawalAction, TokenUid, public from hathor.nanocontracts.utils import derive_child_token_id +from hathor.transaction import Transaction +from tests.dag_builder.builder import TestDAGBuilder from tests.nanocontracts.blueprints.unittest import BlueprintTestCase +from tests.nanocontracts.test_reentrancy import HTR_TOKEN_UID class MyBlueprint(Blueprint): @@ -63,10 +66,11 @@ def move_tokens_to_nc( class MyOtherBlueprint(Blueprint): + fbt_uid: TokenUid @public(allow_deposit=True, allow_withdrawal=True, allow_grant_authority=True) def initialize(self, ctx: Context) -> None: - self.syscall.create_fee_token( + self.fbt_uid = self.syscall.create_fee_token( token_name='FBT', token_symbol='FBT', amount=1_000_000, @@ -74,6 +78,16 @@ def initialize(self, ctx: Context) -> None: melt_authority=True, ) + @public(allow_deposit=True, allow_withdrawal=True) + def move_tokens_to_nc( + self, + ctx: Context, + nc_id: ContractId, + ) -> None: + action = NCDepositAction(token_uid=self.fbt_uid, amount=1000) + fees = [NCFee(token_uid=TokenUid(HTR_TOKEN_UID), amount=1)] + self.syscall.get_contract(nc_id, blueprint_id=None).public(action, fees=fees).noop() + class NCActionsFeeTestCase(BlueprintTestCase): def setUp(self) -> None: @@ -346,3 +360,65 @@ def test_create_and_actions(self) -> None: htr_balance_key: Balance(value=0, can_mint=False, can_melt=False), fbt_balance_key: 
Balance(value=900_000, can_mint=True, can_melt=True), } + + def test_token_index_updates(self) -> None: + """Test token creation, token movement between contracts, and verify token indexes.""" + # Register the blueprint + self.dag_builder = TestDAGBuilder.from_manager(self.manager) + + # Build the DAG: create two contracts, create tokens in first, then move tokens to second + artifacts = self.dag_builder.build_from_str(f''' + blockchain genesis b[1..13] + b10 < dummy + + tx1.nc_id = "{self.my_other_blueprint_id.hex()}" + tx1.nc_method = initialize() + tx1.nc_deposit = 100 HTR + + tx2.nc_id = "{self.my_blueprint_id.hex()}" + tx2.nc_method = initialize() + + tx3.nc_id = tx1 + tx3.nc_method = move_tokens_to_nc(`tx2`) + + tx1 < b11 < tx2 < b12 < tx3 < b13 + tx1 <-- b11 + tx2 <-- b12 + tx3 <-- b13 + ''') + + # Propagate transactions and blocks + artifacts.propagate_with(self.manager) + + tx1, tx2, tx3 = artifacts.get_typed_vertices(('tx1', 'tx2', 'tx3'), Transaction) + + # Get tokens index + tokens_index = self.manager.tx_storage.indexes.tokens + assert tokens_index is not None + + fbt_id = derive_child_token_id(ContractId(tx1.hash), token_symbol='FBT') + fbt_token_info = tokens_index.get_token_info(fbt_id) + assert fbt_token_info.get_total() == 1_000_000 + + # Verify HTR total (genesis + mined blocks - fees paid) + # Genesis: GENESIS_TOKENS + 13 blocks * INITIAL_TOKENS_PER_BLOCK + # Fees: 1% of 1000 (10 HTR) + 1 HTR (fee token creation) + 1 HTR (move_tokens_to_nc) + htr_token_info = tokens_index.get_token_info(HATHOR_TOKEN_UID) + expected_htr_total = ( + self._settings.GENESIS_TOKENS + + 13 * self._settings.INITIAL_TOKENS_PER_BLOCK + - 1 # 1 HTR fee for fee token creation + - 1 # 1 HTR fee for first move_tokens_to_nc + ) + assert htr_token_info.get_total() == expected_htr_total + + # Verify contract balances after all operations + nc1_storage = self.manager.get_best_block_nc_storage(tx1.hash) + nc2_storage = self.manager.get_best_block_nc_storage(tx2.hash) + + assert 
nc1_storage.get_balance(HATHOR_TOKEN_UID) == Balance(value=98, can_mint=False, can_melt=False) + assert nc1_storage.get_balance(fbt_id) == Balance(value=999_000, can_mint=True, can_melt=True) + + # nc2 should have: 0 HTR, 1000 FBT + assert nc2_storage.get_balance(HATHOR_TOKEN_UID) == Balance(value=0, can_mint=False, can_melt=False) + assert nc2_storage.get_balance(fbt_id) == Balance(value=1000, can_mint=False, can_melt=False) diff --git a/tests/nanocontracts/test_contract_create_contract.py b/tests/nanocontracts/test_contract_create_contract.py index 00d48666d..18d7bc96f 100644 --- a/tests/nanocontracts/test_contract_create_contract.py +++ b/tests/nanocontracts/test_contract_create_contract.py @@ -1,7 +1,6 @@ from typing import Optional -from hathor.conf.settings import HATHOR_TOKEN_UID -from hathor.nanocontracts import Blueprint, Context, public +from hathor.nanocontracts import HATHOR_TOKEN_UID, Blueprint, Context, public from hathor.nanocontracts.nc_types import NCType, make_nc_type_for_arg_type as make_nc_type from hathor.nanocontracts.storage.contract_storage import Balance from hathor.nanocontracts.types import ( diff --git a/tests/nanocontracts/test_execution_order.py b/tests/nanocontracts/test_execution_order.py index 9ff098d25..8374498a6 100644 --- a/tests/nanocontracts/test_execution_order.py +++ b/tests/nanocontracts/test_execution_order.py @@ -12,8 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from hathor.conf.settings import HATHOR_TOKEN_UID -from hathor.nanocontracts import Blueprint, Context, public +from hathor.nanocontracts import HATHOR_TOKEN_UID, Blueprint, Context, public from hathor.nanocontracts.types import ( ContractId, NCAction, diff --git a/tests/nanocontracts/test_fallback_method.py b/tests/nanocontracts/test_fallback_method.py index 0d1ad2158..0022b3ca4 100644 --- a/tests/nanocontracts/test_fallback_method.py +++ b/tests/nanocontracts/test_fallback_method.py @@ -17,8 +17,7 @@ import pytest -from hathor.conf.settings import HATHOR_TOKEN_UID -from hathor.nanocontracts import NC_EXECUTION_FAIL_ID, Blueprint, Context, NCFail, public +from hathor.nanocontracts import HATHOR_TOKEN_UID, NC_EXECUTION_FAIL_ID, Blueprint, Context, NCFail, public from hathor.nanocontracts.exception import NCError, NCInvalidMethodCall from hathor.nanocontracts.method import ArgsOnly from hathor.nanocontracts.nc_exec_logs import NCCallBeginEntry, NCCallEndEntry diff --git a/tests/nanocontracts/test_fee_tokens.py b/tests/nanocontracts/test_fee_tokens.py new file mode 100644 index 000000000..0ec4f990e --- /dev/null +++ b/tests/nanocontracts/test_fee_tokens.py @@ -0,0 +1,411 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import pytest + +from hathor import Blueprint, Context, ContractId, NCActionType, public +from hathor.exception import InvalidNewTransaction +from hathor.nanocontracts import NC_EXECUTION_FAIL_ID +from hathor.nanocontracts.utils import derive_child_token_id +from hathor.transaction import Block, Transaction, TxInput, TxOutput +from hathor.transaction.headers import FeeHeader +from hathor.transaction.headers.fee_header import FeeHeaderEntry +from hathor.transaction.headers.nano_header import NanoHeaderAction +from hathor.transaction.nc_execution_state import NCExecutionState +from hathor.transaction.scripts import Opcode +from tests.dag_builder.builder import TestDAGBuilder +from tests.nanocontracts.blueprints.unittest import BlueprintTestCase +from tests.nanocontracts.utils import assert_nc_failure_reason + + +class MyBlueprint(Blueprint): + @public(allow_deposit=True) + def initialize(self, ctx: Context) -> None: + pass + + @public(allow_withdrawal=True) + def create_deposit_token(self, ctx: Context) -> None: + self.syscall.create_deposit_token( + token_name='deposit-based token', + token_symbol='DBT', + amount=100, + ) + + @public(allow_withdrawal=True) + def create_fee_token(self, ctx: Context) -> None: + self.syscall.create_fee_token( + token_name='fee-based token', + token_symbol='FBT', + amount=10 ** 9, + ) + + @public(allow_withdrawal=True) + def nop(self, ctx: Context) -> None: + pass + + +class FeeTokensTestCase(BlueprintTestCase): + def setUp(self) -> None: + super().setUp() + self.blueprint_id = self._register_blueprint_class(MyBlueprint) + self.dag_builder = TestDAGBuilder.from_manager(self.manager) + + def test_postponed_verification_success(self) -> None: + """Postponed verification means running verify_sum on NC execution-time instead of verification-time.""" + artifacts = self.dag_builder.build_from_str(f''' + blockchain genesis b[1..12] + b10 < dummy + + tx1.nc_id = "{self.blueprint_id.hex()}" + tx1.nc_method = initialize() + tx1.nc_deposit = 1 
HTR + + tx2.nc_id = tx1 + tx2.nc_method = create_fee_token() + tx2.fee = 1 HTR + + tx1 < tx2 + tx1 <-- b11 + tx2 <-- b12 + ''') + + b11, b12 = artifacts.get_typed_vertices(('b11', 'b12'), Block) + tx1, tx2 = artifacts.get_typed_vertices(('tx1', 'tx2'), Transaction) + + fbt_id = derive_child_token_id(ContractId(tx1.hash), token_symbol='FBT') + tx2.tokens.append(fbt_id) + + fbt_output = TxOutput(value=10 ** 9, script=b'', token_data=1) + tx2.outputs.append(fbt_output) + + fbt_withdraw = NanoHeaderAction(type=NCActionType.WITHDRAWAL, token_index=1, amount=10 ** 9) + tx2_nano_header = tx2.get_nano_header() + tx2_nano_header.nc_actions.append(fbt_withdraw) + + artifacts.propagate_with(self.manager, up_to='b11') + assert tx1.get_metadata().first_block == b11.hash + assert tx1.get_metadata().nc_execution == NCExecutionState.SUCCESS + assert tx1.get_metadata().voided_by is None + + artifacts.propagate_with(self.manager, up_to='b12') + assert tx2.get_metadata().first_block == b12.hash + assert tx2.get_metadata().nc_execution == NCExecutionState.SUCCESS + assert tx2.get_metadata().voided_by is None + + def test_postponed_verification_fail_nonexistent(self) -> None: + artifacts = self.dag_builder.build_from_str(f''' + blockchain genesis b[1..12] + b10 < dummy + + tx1.nc_id = "{self.blueprint_id.hex()}" + tx1.nc_method = initialize() + tx1.nc_deposit = 1 HTR + + tx2.nc_id = tx1 + tx2.nc_method = nop() + tx2.fee = 1 HTR + + tx1 < tx2 + tx1 <-- b11 + tx2 <-- b12 + ''') + + b11, b12 = artifacts.get_typed_vertices(('b11', 'b12'), Block) + tx1, tx2 = artifacts.get_typed_vertices(('tx1', 'tx2'), Transaction) + + fbt_id = derive_child_token_id(ContractId(tx1.hash), token_symbol='FBT') + tx2.tokens.append(fbt_id) + + fbt_output = TxOutput(value=10 ** 9, script=b'', token_data=1) + tx2.outputs.append(fbt_output) + + fbt_withdraw = NanoHeaderAction(type=NCActionType.WITHDRAWAL, token_index=1, amount=10 ** 9) + tx2_nano_header = tx2.get_nano_header() + 
tx2_nano_header.nc_actions.append(fbt_withdraw) + + artifacts.propagate_with(self.manager, up_to='b11') + assert tx1.get_metadata().first_block == b11.hash + assert tx1.get_metadata().nc_execution == NCExecutionState.SUCCESS + assert tx1.get_metadata().voided_by is None + + artifacts.propagate_with(self.manager, up_to='b12') + assert tx2.get_metadata().first_block == b12.hash + assert tx2.get_metadata().nc_execution == NCExecutionState.FAILURE + assert tx2.get_metadata().voided_by == {NC_EXECUTION_FAIL_ID, tx2.hash} + + # It fails with a balance error caused by the withdrawal, + # because this check runs before the postponed verify_sum. + assert_nc_failure_reason( + manager=self.manager, + tx_id=tx2.hash, + block_id=b12.hash, + reason='NCInsufficientFunds: negative balance for contract', + ) + + def test_postponed_verification_fail_with_dbt(self) -> None: + artifacts = self.dag_builder.build_from_str(f''' + blockchain genesis b[1..12] + b10 < dummy + + tx1.nc_id = "{self.blueprint_id.hex()}" + tx1.nc_method = initialize() + tx1.nc_deposit = 1 HTR + + tx2.nc_id = tx1 + tx2.nc_method = create_deposit_token() + tx2.fee = 1 HTR + + tx1 < tx2 + tx1 <-- b11 + tx2 <-- b12 + ''') + + b11, b12 = artifacts.get_typed_vertices(('b11', 'b12'), Block) + tx1, tx2 = artifacts.get_typed_vertices(('tx1', 'tx2'), Transaction) + + dbt_id = derive_child_token_id(ContractId(tx1.hash), token_symbol='DBT') + tx2.tokens.append(dbt_id) + + dbt_output = TxOutput(value=100, script=b'', token_data=1) + tx2.outputs.append(dbt_output) + + dbt_withdraw = NanoHeaderAction(type=NCActionType.WITHDRAWAL, token_index=1, amount=100) + tx2_nano_header = tx2.get_nano_header() + tx2_nano_header.nc_actions.append(dbt_withdraw) + + artifacts.propagate_with(self.manager, up_to='b11') + assert tx1.get_metadata().first_block == b11.hash + assert tx1.get_metadata().nc_execution == NCExecutionState.SUCCESS + assert tx1.get_metadata().voided_by is None + + artifacts.propagate_with(self.manager, up_to='b12') + 
assert tx2.get_metadata().first_block == b12.hash + assert tx2.get_metadata().nc_execution == NCExecutionState.FAILURE + assert tx2.get_metadata().voided_by == {NC_EXECUTION_FAIL_ID, tx2.hash} + + assert_nc_failure_reason( + manager=self.manager, + tx_id=tx2.hash, + block_id=b12.hash, + reason='InputOutputMismatch: Fee amount is different than expected. (amount=1, expected=0)', + ) + + def test_postponed_verification_fail_less_htr_balance(self) -> None: + artifacts = self.dag_builder.build_from_str(f''' + blockchain genesis b[1..12] + b10 < dummy + + tx1.nc_id = "{self.blueprint_id.hex()}" + tx1.nc_method = initialize() + tx1.nc_deposit = 1 HTR + + tx2.nc_id = tx1 + tx2.nc_method = create_fee_token() + tx2.fee = 1 HTR + tx2.out[0] = 1000 HTR + + tx1 < b11 < tx2 + tx1 <-- b11 + tx2 <-- b12 + ''') + + b11, b12 = artifacts.get_typed_vertices(('b11', 'b12'), Block) + tx1, tx2 = artifacts.get_typed_vertices(('tx1', 'tx2'), Transaction) + + fbt_id = derive_child_token_id(ContractId(tx1.hash), token_symbol='FBT') + tx2.tokens.append(fbt_id) + + removed_htr_output = tx2.outputs.pop() + assert removed_htr_output.token_data == 0 + assert removed_htr_output.value == 1000 + fbt_output = TxOutput(value=10 ** 9, script=b'', token_data=1) + tx2.outputs.append(fbt_output) + + fbt_withdraw = NanoHeaderAction(type=NCActionType.WITHDRAWAL, token_index=1, amount=10 ** 9) + tx2_nano_header = tx2.get_nano_header() + tx2_nano_header.nc_actions.append(fbt_withdraw) + + artifacts.propagate_with(self.manager, up_to='b11') + assert tx1.get_metadata().first_block == b11.hash + assert tx1.get_metadata().nc_execution == NCExecutionState.SUCCESS + assert tx1.get_metadata().voided_by is None + + # Verification of minting HTR is not postponed, so it fails in verification-time. 
+ with pytest.raises(Exception) as e: + artifacts.propagate_with(self.manager, up_to='tx2') + + assert isinstance(e.value.__cause__, InvalidNewTransaction) + assert e.value.__cause__.args[0] == ( + 'full validation failed: HTR balance is different than expected. (amount=-1000, expected=0)' + ) + + def test_postponed_verification_fail_more_htr_balance(self) -> None: + artifacts = self.dag_builder.build_from_str(f''' + blockchain genesis b[1..12] + b10 < dummy + + tx1.nc_id = "{self.blueprint_id.hex()}" + tx1.nc_method = initialize() + tx1.nc_deposit = 1 HTR + + tx2.nc_id = tx1 + tx2.nc_method = create_fee_token() + tx2.fee = 1 HTR + + tx1 < tx2 + tx1 <-- b11 + tx2 <-- b12 + ''') + + b11, b12 = artifacts.get_typed_vertices(('b11', 'b12'), Block) + tx1, tx2 = artifacts.get_typed_vertices(('tx1', 'tx2'), Transaction) + + fbt_id = derive_child_token_id(ContractId(tx1.hash), token_symbol='FBT') + tx2.tokens.append(fbt_id) + + fbt_output = TxOutput(value=10 ** 9, script=b'', token_data=1) + extra_htr_output = TxOutput(value=1000, script=b'') + tx2.outputs.append(fbt_output) + tx2.outputs.append(extra_htr_output) + + fbt_withdraw = NanoHeaderAction(type=NCActionType.WITHDRAWAL, token_index=1, amount=10 ** 9) + tx2_nano_header = tx2.get_nano_header() + tx2_nano_header.nc_actions.append(fbt_withdraw) + + artifacts.propagate_with(self.manager, up_to='b11') + assert tx1.get_metadata().first_block == b11.hash + assert tx1.get_metadata().nc_execution == NCExecutionState.SUCCESS + assert tx1.get_metadata().voided_by is None + + artifacts.propagate_with(self.manager, up_to='b12') + assert tx2.get_metadata().first_block == b12.hash + assert tx2.get_metadata().nc_execution == NCExecutionState.FAILURE + assert tx2.get_metadata().voided_by == {NC_EXECUTION_FAIL_ID, tx2.hash} + + assert_nc_failure_reason( + manager=self.manager, + tx_id=tx2.hash, + block_id=b12.hash, + reason='InputOutputMismatch: HTR balance is different than expected. 
(amount=1000, expected=0)', + ) + + def test_postponed_verification_pay_fee_with_fbt(self) -> None: + artifacts = self.dag_builder.build_from_str(f''' + blockchain genesis b[1..12] + b10 < dummy + + tx1.nc_id = "{self.blueprint_id.hex()}" + tx1.nc_method = initialize() + tx1.nc_deposit = 1 HTR + + tx2.nc_id = tx1 + tx2.nc_method = create_fee_token() + + tx1 < tx2 + tx1 <-- b11 + tx2 <-- b12 + ''') + + b11, b12 = artifacts.get_typed_vertices(('b11', 'b12'), Block) + tx1, tx2 = artifacts.get_typed_vertices(('tx1', 'tx2'), Transaction) + + fbt_id = derive_child_token_id(ContractId(tx1.hash), token_symbol='FBT') + tx2.tokens.append(fbt_id) + + fbt_output = TxOutput(value=10 ** 9 - 100, script=b'', token_data=1) + tx2.outputs.append(fbt_output) + + fbt_withdraw = NanoHeaderAction(type=NCActionType.WITHDRAWAL, token_index=1, amount=10 ** 9) + tx2_nano_header = tx2.get_nano_header() + tx2_nano_header.nc_actions.append(fbt_withdraw) + + fee_entry = FeeHeaderEntry(token_index=1, amount=100) + fee_header = FeeHeader(self._settings, tx2, [fee_entry]) + tx2.headers.append(fee_header) + + artifacts.propagate_with(self.manager, up_to='b11') + assert tx1.get_metadata().first_block == b11.hash + assert tx1.get_metadata().nc_execution == NCExecutionState.SUCCESS + assert tx1.get_metadata().voided_by is None + + artifacts.propagate_with(self.manager, up_to='b12') + assert tx2.get_metadata().first_block == b12.hash + assert tx2.get_metadata().nc_execution == NCExecutionState.FAILURE + assert tx2.get_metadata().voided_by == {NC_EXECUTION_FAIL_ID, tx2.hash} + + assert_nc_failure_reason( + manager=self.manager, + tx_id=tx2.hash, + block_id=b12.hash, + reason=f'InvalidToken: token {fbt_id.hex()} cannot be used to pay fees', + ) + + def test_postponed_verification_tx_spending_nano(self) -> None: + artifacts = self.dag_builder.build_from_str(f''' + blockchain genesis b[1..12] + b10 < dummy + + tx1.nc_id = "{self.blueprint_id.hex()}" + tx1.nc_method = initialize() + tx1.nc_deposit = 1 HTR + 
+ tx2.nc_id = tx1 + tx2.nc_method = create_fee_token() + tx2.fee = 1 HTR + + tx3.fee = 1 HTR + + tx1 < b11 < tx2 < tx3 < b12 + tx1 <-- b11 + tx2 <-- b12 + ''') + + b11, b12 = artifacts.get_typed_vertices(('b11', 'b12'), Block) + tx1, tx2, tx3 = artifacts.get_typed_vertices(('tx1', 'tx2', 'tx3'), Transaction) + + fbt_id = derive_child_token_id(ContractId(tx1.hash), token_symbol='FBT') + tx2.tokens.append(fbt_id) + tx3.tokens.append(fbt_id) + + fbt_output = TxOutput(value=10 ** 9, script=b'', token_data=1) + tx2.outputs.append(fbt_output) + tx3.outputs.append(fbt_output) + + fbt_withdraw = NanoHeaderAction(type=NCActionType.WITHDRAWAL, token_index=1, amount=10 ** 9) + tx2_nano_header = tx2.get_nano_header() + tx2_nano_header.nc_actions.append(fbt_withdraw) + + fbt_input = TxInput(tx_id=tx2.hash, index=len(tx2.outputs) - 1, data=bytes([Opcode.OP_1])) + tx3.inputs.append(fbt_input) + + artifacts.propagate_with(self.manager, up_to='b11') + assert tx1.get_metadata().first_block == b11.hash + assert tx1.get_metadata().nc_execution == NCExecutionState.SUCCESS + assert tx1.get_metadata().voided_by is None + + with pytest.raises(Exception) as e: + artifacts.propagate_with(self.manager, up_to='tx3') + + assert isinstance(e.value.__cause__, InvalidNewTransaction) + assert e.value.__cause__.args[0] == f'full validation failed: token uid {fbt_id.hex()} not found.' + + assert self.manager.vertex_handler.on_new_relayed_vertex(b12) + assert tx2.get_metadata().first_block == b12.hash + assert tx2.get_metadata().nc_execution == NCExecutionState.SUCCESS + assert tx2.get_metadata().voided_by is None + + # Now, it's valid and accepted. 
+ assert self.manager.vertex_handler.on_new_relayed_vertex(tx3) + assert tx3.get_metadata().validation.is_valid() + assert tx3.get_metadata().voided_by is None diff --git a/tests/nanocontracts/test_indexes2.py b/tests/nanocontracts/test_indexes2.py index 68006cc5c..0d4b35db7 100644 --- a/tests/nanocontracts/test_indexes2.py +++ b/tests/nanocontracts/test_indexes2.py @@ -12,8 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from hathor.conf.settings import HATHOR_TOKEN_UID -from hathor.nanocontracts import Blueprint, Context, public +from hathor.nanocontracts import HATHOR_TOKEN_UID, Blueprint, Context, public from hathor.nanocontracts.types import ContractId, VertexId from hathor.nanocontracts.utils import derive_child_token_id from hathor.transaction import Transaction diff --git a/tests/nanocontracts/test_syscalls.py b/tests/nanocontracts/test_syscalls.py index d46f77f8e..0a3e90f97 100644 --- a/tests/nanocontracts/test_syscalls.py +++ b/tests/nanocontracts/test_syscalls.py @@ -2,7 +2,7 @@ import pytest -from hathor.conf.settings import HATHOR_TOKEN_UID +from hathor.nanocontracts import HATHOR_TOKEN_UID from hathor.nanocontracts.blueprint import Blueprint from hathor.nanocontracts.context import Context from hathor.nanocontracts.exception import NCInsufficientFunds, NCInvalidSyscall @@ -371,7 +371,7 @@ def test_fee_token_creation(self) -> None: fbt2_balance_key: Balance(value=1000000, can_mint=True, can_melt=True), } - # Try to create fee tokens without enough dbt balances + # Try to create fee tokens without enough dbt balance msg = f'negative balance for contract {nc_id.hex()}' with pytest.raises(NCInsufficientFunds, match=msg): self.runner.call_public_method( @@ -409,7 +409,7 @@ def test_fee_token_creation(self) -> None: TokenUid(HATHOR_TOKEN_UID) ) - # created fee token paying with deposit token + # Balance should remain unchanged after failed melt attempt assert storage.get_all_balances() == { 
htr_balance_key: Balance(value=0, can_mint=False, can_melt=False), fbt_balance_key: Balance(value=1000000, can_mint=True, can_melt=True), @@ -456,7 +456,7 @@ def test_fee_token_melt(self) -> None: # Successfully melt some tokens - don't deposit, melt from existing balance using deposit token self.runner.call_public_method(nc_id, 'melt', self.create_context(), token_uid, 500000, dbt_token_uid) - # Balance should decrease by melted amount, HTR consumed for fee + # Balance should decrease by melted amount, DBT consumed for fee assert storage.get_all_balances() == { htr_balance_key: Balance(value=0, can_mint=False, can_melt=False), fbt_balance_key: Balance(value=500000, can_mint=True, can_melt=True), diff --git a/tests/nanocontracts/test_token_creation.py b/tests/nanocontracts/test_token_creation.py index e7bf71e8a..969b01006 100644 --- a/tests/nanocontracts/test_token_creation.py +++ b/tests/nanocontracts/test_token_creation.py @@ -1,3 +1,4 @@ +from unittest.mock import Mock from hathor.conf import HathorSettings from hathor.nanocontracts import NC_EXECUTION_FAIL_ID @@ -148,7 +149,7 @@ def test_token_creation_by_vertex(self) -> None: Balance(value=7, can_mint=False, can_melt=False) ) - jkl_token_info = JKL._get_token_info_from_inputs() + jkl_token_info = JKL._get_token_info_from_inputs(Mock()) JKL._update_token_info_from_outputs(token_dict=jkl_token_info) assert jkl_token_info[settings.HATHOR_TOKEN_UID].amount == -2 diff --git a/tests/poa/test_poa_verification.py b/tests/poa/test_poa_verification.py index c87efebe7..ca05103e4 100644 --- a/tests/poa/test_poa_verification.py +++ b/tests/poa/test_poa_verification.py @@ -80,7 +80,7 @@ def test_poa_block_verify_basic(self) -> None: patch.object(BlockVerifier, 'verify_reward', verify_reward_wrapped), patch.object(PoaBlockVerifier, 'verify_poa', verify_poa_wrapped), ): - self.manager.verification_service.verify_basic(block, self.verification_params) + self.manager.verification_service.verify_basic(block, 
self.get_verification_params(self.manager)) # Vertex methods verify_version_basic_wrapped.assert_called_once() @@ -111,7 +111,7 @@ def test_poa_block_verify_without_storage(self) -> None: patch.object(BlockVerifier, 'verify_data', verify_data_wrapped), patch.object(VertexVerifier, 'verify_sigops_output', verify_sigops_output_wrapped), ): - self.manager.verification_service.verify_without_storage(block, self.verification_params) + self.manager.verification_service.verify_without_storage(block, self.get_verification_params(self.manager)) # Vertex methods verify_outputs_wrapped.assert_called_once() @@ -153,7 +153,7 @@ def test_poa_block_verify(self) -> None: patch.object(BlockVerifier, 'verify_height', verify_height_wrapped), patch.object(BlockVerifier, 'verify_mandatory_signaling', verify_mandatory_signaling_wrapped), ): - self.manager.verification_service.verify(block, self.verification_params) + self.manager.verification_service.verify(block, self.get_verification_params(self.manager)) # Vertex methods verify_outputs_wrapped.assert_called_once() @@ -185,7 +185,7 @@ def test_poa_block_validate_basic(self) -> None: patch.object(BlockVerifier, 'verify_reward', verify_reward_wrapped), patch.object(PoaBlockVerifier, 'verify_poa', verify_poa_wrapped), ): - self.manager.verification_service.validate_basic(block, self.verification_params) + self.manager.verification_service.validate_basic(block, self.get_verification_params(self.manager)) # Vertex methods verify_version_basic_wrapped.assert_called_once() @@ -199,7 +199,7 @@ def test_poa_block_validate_basic(self) -> None: self.assertEqual(block.get_metadata().validation, ValidationState.BASIC) # full validation should still pass and the validation updated to FULL - self.manager.verification_service.validate_full(block, self.verification_params) + self.manager.verification_service.validate_full(block, self.get_verification_params(self.manager)) self.assertEqual(block.get_metadata().validation, ValidationState.FULL) # and if 
running basic validation again it shouldn't validate or change the validation state @@ -210,7 +210,7 @@ def test_poa_block_validate_basic(self) -> None: patch.object(BlockVerifier, 'verify_weight', verify_weight_wrapped2), patch.object(BlockVerifier, 'verify_reward', verify_reward_wrapped2), ): - self.manager.verification_service.validate_basic(block, self.verification_params) + self.manager.verification_service.validate_basic(block, self.get_verification_params(self.manager)) # Block methods verify_weight_wrapped2.assert_not_called() @@ -256,7 +256,7 @@ def test_poa_block_validate_full(self) -> None: patch.object(BlockVerifier, 'verify_mandatory_signaling', verify_mandatory_signaling_wrapped), patch.object(PoaBlockVerifier, 'verify_poa', verify_poa_wrapped), ): - self.manager.verification_service.validate_full(block, self.verification_params) + self.manager.verification_service.validate_full(block, self.get_verification_params(self.manager)) # Vertex methods verify_version_basic_wrapped.assert_called_once() diff --git a/tests/tx/test_fee_tokens.py b/tests/tx/test_fee_tokens.py index ca155793f..7af49c551 100644 --- a/tests/tx/test_fee_tokens.py +++ b/tests/tx/test_fee_tokens.py @@ -1,3 +1,17 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ import pytest from hathor.conf.settings import NanoContractsSetting @@ -5,19 +19,16 @@ from hathor.exception import InvalidNewTransaction from hathor.indexes.tokens_index import TokenUtxoInfo from hathor.transaction import Transaction, TxInput, TxOutput -from hathor.transaction.exceptions import ForbiddenMelt, InputOutputMismatch, TransactionDataError +from hathor.transaction.exceptions import ForbiddenMelt, HeaderNotSupported, InputOutputMismatch +from hathor.transaction.headers import FeeHeader +from hathor.transaction.headers.fee_header import FeeHeaderEntry from hathor.transaction.scripts import P2PKH from hathor.transaction.token_creation_tx import TokenCreationTransaction from hathor.transaction.token_info import TokenVersion from hathor.transaction.util import get_deposit_token_withdraw_amount from tests import unittest -from tests.utils import ( - add_blocks_unlock_reward, - create_fee_tokens, - create_tokens, - get_deposit_token_amount_from_htr, - get_genesis_key, -) +from tests.dag_builder.builder import TestDAGBuilder +from tests.utils import add_blocks_unlock_reward, create_fee_tokens, create_tokens, get_genesis_key class FeeTokenTest(unittest.TestCase): @@ -74,6 +85,7 @@ def test_fee_token_melt(self) -> None: TxOutput(100, script, 1), # Melt authority TxOutput(TxOutput.TOKEN_MELT_MASK, script, 0b10000001), + TxOutput(htr_amount - 4, script, 0) ] tx2 = Transaction( @@ -85,14 +97,19 @@ def test_fee_token_melt(self) -> None: storage=self.manager.tx_storage, timestamp=int(self.clock.seconds()) ) + + fee_header = FeeHeader( + settings=self._settings, + tx=tx2, + fees=[FeeHeaderEntry(token_index=0, amount=4)] + ) + tx2.headers.append(fee_header) + # Melt 100 tokens from fee_token and add 4 outputs, should charge only by the outputs count - tx_fee = tx2.get_complete_token_info().calculate_fee(self.manager._settings) + nc_storage = self.manager.get_nc_block_storage(self.manager.tx_storage.get_best_block()) + tx_fee = 
tx2.get_complete_token_info(nc_storage).calculate_fee(self.manager._settings) self.assertEqual(tx_fee, 4) - change_value = htr_amount - tx_fee - # 100 htr - 4 htr (fee) - self.assertEqual(change_value, 96) - outputs.append(TxOutput(change_value, script, 0)) - + self.assertEqual(tx_fee, fee_header.total_fee_amount()) # It's the tx item output signature # this signature_data allows the tx output to be spent by the tx2 inputs self.sign_inputs(tx2) @@ -120,7 +137,8 @@ def test_fee_token_melt(self) -> None: outputs = [ TxOutput(100, script, 2), TxOutput(100, script, 2), - TxOutput(100, script, 2) + TxOutput(100, script, 2), + TxOutput(96-5, script, 0) ] tx3 = Transaction( @@ -132,16 +150,19 @@ def test_fee_token_melt(self) -> None: storage=self.manager.tx_storage, timestamp=int(self.clock.seconds()) ) + fee_header = FeeHeader( + settings=self._settings, + tx=tx3, + fees=[FeeHeaderEntry(token_index=0, amount=5)], + ) + tx3.headers.append(fee_header) # melting 2 tokens without outputs, should charge FEE_PER_OUT * 2 = 2 # melting 1 token with outputs, should charge 1 per non-authority output = 3 - tx3_fee = tx3.get_complete_token_info().calculate_fee(self.manager._settings) + nc_storage = self.manager.get_nc_block_storage(self.manager.tx_storage.get_best_block()) + tx3_fee = tx3.get_complete_token_info(nc_storage).calculate_fee(self.manager._settings) # Multiple inputs should be only charge once per token when no outputs are present self.assertEqual(tx3_fee, 5) - tx3_change_value = change_value - tx3_fee - # tx2 change value - fee: 96 - 5 - self.assertEqual(tx3_change_value, 91) - tx3.outputs.append(TxOutput(tx3_change_value, script, 0)) self.sign_inputs(tx3) self.resolve_and_propagate(tx3) @@ -166,8 +187,9 @@ def test_fee_token_melt_without_authority(self) -> None: ] outputs = [ - # New token amount + # New token amount - 500 - 100 = 400 TxOutput(new_token_amount, script, 1), + TxOutput(4, script, 0) ] tx2 = Transaction( @@ -179,12 +201,19 @@ def 
test_fee_token_melt_without_authority(self) -> None: storage=self.manager.tx_storage, timestamp=int(self.clock.seconds()) ) + + fee_header = FeeHeader( + settings=self._settings, + tx=tx2, + fees=[FeeHeaderEntry(token_index=0, amount=1)], + ) + tx2.headers.append(fee_header) + # pick the last tip tx output in HTR then subtracts the fee - tx_fee = tx2.get_complete_token_info().calculate_fee(self.manager._settings) + nc_storage = self.manager.get_nc_block_storage(self.manager.tx_storage.get_best_block()) + tx_fee = tx2.get_complete_token_info(nc_storage).calculate_fee(self.manager._settings) self.assertEqual(tx_fee, 1) - change_value = htr_amount - tx_fee - self.assertEqual(change_value, 4) - outputs.append(TxOutput(change_value, script, 0)) + self.assertEqual(tx_fee, fee_header.total_fee_amount()) # It's the tx item output signature # this signature_data allows the tx output to be spent by the tx2 inputs @@ -212,22 +241,33 @@ def test_fee_token_melt_without_output(self) -> None: TxInput(tx.hash, 4, b'') ] + outputs = [ + TxOutput(4, script, 0) + ] + tx2 = Transaction( weight=1, inputs=inputs, - outputs=[], + outputs=outputs, parents=parents, tokens=[token_uid], storage=self.manager.tx_storage, timestamp=int(self.clock.seconds()) ) + + fee_header = FeeHeader( + settings=self._settings, + tx=tx2, + fees=[FeeHeaderEntry(token_index=0, amount=1)], + ) + tx2.headers.append(fee_header) + # pick the last tip tx output in HTR then subtracts the fee - tx_fee = tx2.get_complete_token_info().calculate_fee(self.manager._settings) + nc_storage = self.manager.get_nc_block_storage(self.manager.tx_storage.get_best_block()) + tx_fee = tx2.get_complete_token_info(nc_storage).calculate_fee(self.manager._settings) # check if only the melting operation was considered self.assertEqual(tx_fee, 1) - change_value = htr_amount - tx_fee - self.assertEqual(change_value, 4) - tx2.outputs.append(TxOutput(change_value, script, 0)) + self.assertEqual(tx_fee, fee_header.total_fee_amount()) 
self.sign_inputs(tx2) self.resolve_and_propagate(tx2) @@ -261,6 +301,8 @@ def test_fee_token_melt_paid_with_deposit(self) -> None: TxOutput(new_token_amount, script, 1), # Melt authority TxOutput(TxOutput.TOKEN_MELT_MASK, script, 0b10000001), + # change value: 500 from initial mint amount - 100 fee + TxOutput(400, script, 2) ] tx2 = Transaction( @@ -273,12 +315,17 @@ def test_fee_token_melt_paid_with_deposit(self) -> None: timestamp=int(self.clock.seconds()) ) - tx_fee = tx2.get_complete_token_info().calculate_fee(self.manager._settings) + fee_header = FeeHeader( + settings=self._settings, + tx=tx2, + fees=[FeeHeaderEntry(token_index=2, amount=100)], + ) + tx2.headers.append(fee_header) + + nc_storage = self.manager.get_nc_block_storage(self.manager.tx_storage.get_best_block()) + tx_fee = tx2.get_complete_token_info(nc_storage).calculate_fee(self.manager._settings) self.assertEqual(tx_fee, 1) - change_value = initial_mint_amount - get_deposit_token_amount_from_htr(tx_fee) - # 500 from initial mint amount - 100 fee - self.assertEqual(change_value, 400) - outputs.append(TxOutput(change_value, script, 2)) + self.assertEqual(tx_fee, fee_header.total_fee_amount()) # It's the signature of the output of the tx item # this signature_data allows the tx output to be spent by the tx2 inputs @@ -295,7 +342,7 @@ def test_fee_token_melt_paid_with_deposit(self) -> None: token_amount=new_token_amount ) tokens_index = self.manager.tx_storage.indexes.tokens.get_token_info(deposit_token_uid) - self.assertEqual(change_value, tokens_index.get_total()) + self.assertEqual(400, tokens_index.get_total()) def test_fee_and_deposit_token_melt_paid_with_deposit(self) -> None: # fbt -> Fee based token @@ -333,6 +380,8 @@ def test_fee_and_deposit_token_melt_paid_with_deposit(self) -> None: TxOutput(TxOutput.TOKEN_MELT_MASK, script, 0b10000001), # HTR change output TxOutput(htr_change_value, script, 0), + # deposit token change output: 500 - 100(fee in the header) - 200(melt) = 200 + 
TxOutput(200, script, 2) ] tx2 = Transaction( @@ -345,16 +394,17 @@ def test_fee_and_deposit_token_melt_paid_with_deposit(self) -> None: timestamp=int(self.clock.seconds()) ) - tx_fee = tx2.get_complete_token_info().calculate_fee(self.manager._settings) + fee_header = FeeHeader( + settings=self._settings, + tx=tx2, + fees=[FeeHeaderEntry(token_index=2, amount=100)], + ) + tx2.headers.append(fee_header) + + nc_storage = self.manager.get_nc_block_storage(self.manager.tx_storage.get_best_block()) + tx_fee = tx2.get_complete_token_info(nc_storage).calculate_fee(self.manager._settings) self.assertEqual(tx_fee, 1) - # Deposit token change and melt in the same - deposit_token_change_value = ( - initial_mint_amount - - get_deposit_token_amount_from_htr(tx_fee) - - dbt_melt_amount) - # 500 - 100 - 200 = 200 - self.assertEqual(deposit_token_change_value, 200) - outputs.append(TxOutput(deposit_token_change_value, script, 2)) + self.assertEqual(tx_fee, fee_header.total_fee_amount()) # It's the signature of the output of the tx item # this signature_data allows the tx output to be spent by the tx2 inputs @@ -371,7 +421,7 @@ def test_fee_and_deposit_token_melt_paid_with_deposit(self) -> None: token_amount=new_token_amount ) tokens_index = self.manager.tx_storage.indexes.tokens.get_token_info(deposit_token_uid) - self.assertEqual(deposit_token_change_value, tokens_index.get_total()) + self.assertEqual(200, tokens_index.get_total()) def test_fee_token_tx_paid_with_htr_and_deposit(self) -> None: # fbt -> Fee based token @@ -420,171 +470,25 @@ def test_fee_token_tx_paid_with_htr_and_deposit(self) -> None: storage=self.manager.tx_storage, timestamp=int(self.clock.seconds()) ) - fee = tx2.get_complete_token_info().calculate_fee(self.manager._settings) - self.assertEqual(fee, 5) - - # It's the signature of the output of the tx item - # this signature_data allows the tx output to be spent by the tx2 inputs - self.sign_inputs(tx2) - self.resolve_and_propagate(tx2) - - def 
test_fee_token_melt_and_deposit_token_to_pay_the_fee_without_melt_authority(self) -> None: - # fbt -> Fee based token - # dbt -> deposit based token - initial_mint_amount = 500 - fbt_tx = create_fee_tokens(self.manager, self.address_b58, initial_mint_amount) - dbt_tx = create_tokens(self.manager, self.address_b58, initial_mint_amount, use_genesis=False) - fbt_token_uid = fbt_tx.tokens[0] - dbt_token_uid = dbt_tx.tokens[0] - parents = self.manager.get_new_tx_parents() - script = P2PKH.create_output_script(self.address) - - # melt tokens and transfer melt authority - melt_amount = 100 - new_fbt_amount = initial_mint_amount - melt_amount - - inputs = [ - # token amount - TxInput(fbt_tx.hash, 0, b''), - # Fee token melt authority - TxInput(fbt_tx.hash, 2, b''), - # Deposit token to pay the fee - TxInput(dbt_tx.hash, 0, b''), - ] - - outputs = [ - # New fbt token amount - TxOutput(new_fbt_amount, script, 1), - # Melt authority - fbt_tx.outputs[2], - # HTR change output - TxOutput(4, script, 0) - ] - tx2 = Transaction( - weight=1, - inputs=inputs, - outputs=outputs, - parents=parents, - tokens=[fbt_token_uid, dbt_token_uid], - storage=self.manager.tx_storage, - timestamp=int(self.clock.seconds()) + fee_header = FeeHeader( + settings=self._settings, + tx=tx2, + fees=[ + FeeHeaderEntry(token_index=0, amount=3), + FeeHeaderEntry(token_index=2, amount=200) + ], ) - - tx_fee = tx2.get_complete_token_info().calculate_fee(self.manager._settings) - self.assertEqual(tx_fee, 1) - dbt_melt_amount = 100 - # Deposit token change and melt in the same - dbt_change_value = ( - initial_mint_amount - - get_deposit_token_amount_from_htr(tx_fee) - - dbt_melt_amount) - # 500 - 100 - 100 = 300 - self.assertEqual(dbt_change_value, 300) - outputs.append(TxOutput(dbt_change_value, script, 2)) + tx2.headers.append(fee_header) + nc_storage = self.manager.get_nc_block_storage(self.manager.tx_storage.get_best_block()) + tx_fee = 
tx2.get_complete_token_info(nc_storage).calculate_fee(self.manager._settings) + self.assertEqual(tx_fee, 5) + self.assertEqual(tx_fee, fee_header.total_fee_amount()) # It's the signature of the output of the tx item # this signature_data allows the tx output to be spent by the tx2 inputs self.sign_inputs(tx2) - with pytest.raises(InvalidNewTransaction) as e: - self.resolve_and_propagate(tx2) - assert 'Melting tokens without a melt authority is forbidden' in str(e.value) - - # check total amount of tokens - self.check_tokens_index( - token_uid=fbt_token_uid, - mint_tx_hash=fbt_tx.hash, - mint_output=1, - melt_tx_hash=fbt_tx.hash, - melt_output=2, - token_amount=initial_mint_amount - ) - self.check_tokens_index( - token_uid=dbt_token_uid, - mint_tx_hash=dbt_tx.hash, - mint_output=1, - melt_tx_hash=dbt_tx.hash, - melt_output=2, - token_amount=initial_mint_amount - ) - - def test_fee_token_melt_deposit_token_with_invalid_amount(self) -> None: - # fbt -> Fee based token - # dbt -> deposit based token - initial_mint_amount = 500 - fbt_tx = create_fee_tokens(self.manager, self.address_b58, initial_mint_amount) - dbt_tx = create_tokens(self.manager, self.address_b58, initial_mint_amount, use_genesis=False) - fbt_token_uid = fbt_tx.tokens[0] - dbt_token_uid = dbt_tx.tokens[0] - parents = self.manager.get_new_tx_parents() - script = P2PKH.create_output_script(self.address) - - # melt tokens and transfer melt authority - melt_amount = 100 - new_fbt_amount = initial_mint_amount - melt_amount - - inputs = [ - # token amount - TxInput(fbt_tx.hash, 0, b''), - # Fee token melt authority - TxInput(fbt_tx.hash, 2, b''), - # Deposit token to pay the fee - TxInput(dbt_tx.hash, 0, b''), - ] - - outputs = [ - # New fbt token amount - TxOutput(new_fbt_amount, script, 1), - # Melt authority - fbt_tx.outputs[2] - ] - - tx2 = Transaction( - weight=1, - inputs=inputs, - outputs=outputs, - parents=parents, - tokens=[fbt_token_uid, dbt_token_uid], - storage=self.manager.tx_storage, - 
timestamp=int(self.clock.seconds()) - ) - - tx_fee = tx2.get_complete_token_info().calculate_fee(self.manager._settings) - self.assertEqual(tx_fee, 1) - dbt_invalid_melt_amount = 99 - # Deposit token change and melt in the same - dbt_change_value = ( - initial_mint_amount - - get_deposit_token_amount_from_htr(tx_fee) - - dbt_invalid_melt_amount) - # 500 - 100 - 99 = 301 - self.assertEqual(dbt_change_value, 301) - outputs.append(TxOutput(dbt_change_value, script, 2)) - - # It's the signature of the output of the tx item - # this signature_data allows the tx output to be spent by the tx2 inputs - self.sign_inputs(tx2) - with pytest.raises(InvalidNewTransaction) as e: - self.resolve_and_propagate(tx2) - assert 'Paying fees with non integer amount is forbidden' in str(e.value) - - # check total amount of tokens - self.check_tokens_index( - token_uid=fbt_token_uid, - mint_tx_hash=fbt_tx.hash, - mint_output=1, - melt_tx_hash=fbt_tx.hash, - melt_output=2, - token_amount=initial_mint_amount - ) - self.check_tokens_index( - token_uid=dbt_token_uid, - mint_tx_hash=dbt_tx.hash, - mint_output=1, - melt_tx_hash=dbt_tx.hash, - melt_output=2, - token_amount=initial_mint_amount - ) + self.resolve_and_propagate(tx2) def test_fee_token_mint(self) -> None: # fbt -> Fee based token @@ -612,7 +516,9 @@ def test_fee_token_mint(self) -> None: # Token minted output TxOutput(mint_amount, script, 1), # Token mint authority - TxOutput(TxOutput.TOKEN_MINT_MASK, script, 0b10000001) + TxOutput(TxOutput.TOKEN_MINT_MASK, script, 0b10000001), + # change amount + TxOutput(4, script, 0) ] tx2 = Transaction( @@ -624,12 +530,17 @@ def test_fee_token_mint(self) -> None: storage=self.manager.tx_storage, timestamp=int(self.clock.seconds()) ) + fee_header = FeeHeader( + settings=self._settings, + tx=tx2, + fees=[FeeHeaderEntry(token_index=0, amount=1)], + ) + tx2.headers.append(fee_header) # pick the last tip tx output in HTR then subtracts the fee - tx_fee = 
tx2.get_complete_token_info().calculate_fee(self.manager._settings) + nc_storage = self.manager.get_nc_block_storage(self.manager.tx_storage.get_best_block()) + tx_fee = tx2.get_complete_token_info(nc_storage).calculate_fee(self.manager._settings) self.assertEqual(tx_fee, 1) - change_value = htr_amount - tx_fee - self.assertEqual(change_value, 4) - outputs.append(TxOutput(change_value, script, 0)) + self.assertEqual(fee_header.total_fee_amount(), 1) # It's the signature of the output of the tx item # this signature_data allows the tx output to be spent by the tx2 inputs @@ -676,8 +587,9 @@ def test_fee_token_tx_without_paying(self) -> None: storage=self.manager.tx_storage, timestamp=int(self.clock.seconds()) ) - fee = tx2.get_complete_token_info().calculate_fee(self.manager._settings) - self.assertEqual(fee, 2) + nc_storage = self.manager.get_nc_block_storage(self.manager.tx_storage.get_best_block()) + tx_fee = tx2.get_complete_token_info(nc_storage).calculate_fee(self.manager._settings) + self.assertEqual(tx_fee, 2) # It's the signature of the output of the tx item # this signature_data allows the tx output to be spent by the tx2 inputs @@ -686,7 +598,7 @@ def test_fee_token_tx_without_paying(self) -> None: with pytest.raises(InvalidNewTransaction) as e: self.resolve_and_propagate(tx2) assert isinstance(e.value.__cause__, InputOutputMismatch) - assert "HTR balance is different than expected. (amount=0, expected=-2)" in str(e.value) + assert "Fee amount is different than expected. 
(amount=0, expected=2)" in str(e.value) def test_fee_token_burn_authority(self) -> None: initial_mint_amount = 500 @@ -708,8 +620,8 @@ def test_fee_token_burn_authority(self) -> None: storage=self.manager.tx_storage, timestamp=int(self.clock.seconds()) ) - - tx_fee = tx2.get_complete_token_info().calculate_fee(self.manager._settings) + nc_storage = self.manager.get_nc_block_storage(self.manager.tx_storage.get_best_block()) + tx_fee = tx2.get_complete_token_info(nc_storage).calculate_fee(self.manager._settings) self.assertEqual(tx_fee, 0) # It's the signature of the output of the tx item @@ -726,9 +638,9 @@ def test_fee_token_activation(self) -> None: ) with pytest.raises(InvalidNewTransaction) as e: create_fee_tokens(custom_manager, self.address_b58) - assert isinstance(e.value.__cause__, TransactionDataError) + assert isinstance(e.value.__cause__, HeaderNotSupported) # 2 is the TokenVersion.FEE enum value - assert "full validation failed: Invalid token version (2)" in str(e.value) + assert "Header `FeeHeader` not supported by `TokenCreationTransaction`" in str(e.value) def test_verify_token_info(self) -> None: """ @@ -843,3 +755,25 @@ def resolve_and_propagate(self, tx: Transaction) -> None: self.manager.cpu_mining_service.resolve(tx) self.manager.propagate_tx(tx) self.run_to_completion() + + def test_pay_fee_with_fbt(self) -> None: + dag_builder = TestDAGBuilder.from_manager(self.manager) + artifacts = dag_builder.build_from_str(''' + blockchain genesis b[1..10] + b10 < dummy + + FBT.token_version = fee + FBT.fee = 1 HTR + + tx1.out[0] = 123 FBT + tx1.fee = 100 FBT + ''') + + fbt = artifacts.get_typed_vertex('FBT', Transaction) + artifacts.propagate_with(self.manager, up_to='FBT') + + with pytest.raises(Exception) as e: + artifacts.propagate_with(self.manager, up_to='tx1') + + assert isinstance(e.value.__cause__, InvalidNewTransaction) + assert e.value.__cause__.args[0] == f'full validation failed: token {fbt.hash_hex} cannot be used to pay fees' diff --git 
a/tests/tx/test_genesis.py b/tests/tx/test_genesis.py index 037fdf1ec..ba930c539 100644 --- a/tests/tx/test_genesis.py +++ b/tests/tx/test_genesis.py @@ -55,7 +55,7 @@ def test_pow(self): def test_verify(self): genesis = self.storage.get_all_genesis() for g in genesis: - self._verification_service.verify_without_storage(g, self.verification_params) + self._verification_service.verify_without_storage(g, self.get_verification_params()) def test_output(self): # Test if block output is valid diff --git a/tests/tx/test_reward_lock.py b/tests/tx/test_reward_lock.py index 466cf0996..bb9cb11a5 100644 --- a/tests/tx/test_reward_lock.py +++ b/tests/tx/test_reward_lock.py @@ -76,7 +76,7 @@ def test_classic_reward_lock(self) -> None: tx, _ = self._spend_reward_tx(self.manager, reward_block) self.assertEqual(tx.static_metadata.min_height, unlock_height) with self.assertRaises(RewardLocked): - self.manager.verification_service.verify(tx, self.verification_params) + self.manager.verification_service.verify(tx, self.get_verification_params(self.manager)) add_new_blocks(self.manager, 1, advance_clock=1) # now it should be spendable @@ -136,7 +136,7 @@ def test_mempool_tx_with_not_enough_height(self) -> None: tx, _ = self._spend_reward_tx(self.manager, reward_block) self.assertEqual(tx.static_metadata.min_height, unlock_height) with self.assertRaises(RewardLocked): - self.manager.verification_service.verify(tx, self.verification_params) + self.manager.verification_service.verify(tx, self.get_verification_params(self.manager)) with self.assertRaises(InvalidNewTransaction): self.assertTrue(self.manager.on_new_tx(tx)) @@ -179,7 +179,7 @@ def test_mempool_tx_invalid_after_reorg(self) -> None: # now the new tx should not pass verification considering the reward lock with self.assertRaises(RewardLocked): - self.manager.verification_service.verify(tx, self.verification_params) + self.manager.verification_service.verify(tx, self.get_verification_params(self.manager)) # the transaction 
should have been removed from the mempool self.assertNotIn(tx, self.manager.tx_storage.iter_mempool_from_best_index()) @@ -219,7 +219,7 @@ def test_classic_reward_lock_timestamp_expected_to_fail(self) -> None: self.manager.cpu_mining_service.resolve(tx) self.assertEqual(tx.static_metadata.min_height, unlock_height) with self.assertRaises(RewardLocked): - self.manager.verification_service.verify(tx, self.verification_params) + self.manager.verification_service.verify(tx, self.get_verification_params(self.manager)) def test_removed_tx_confirmed_by_orphan_block(self) -> None: manager = self.create_peer('unittests') diff --git a/tests/tx/test_tokens.py b/tests/tx/test_tokens.py index 8230ecee4..cf8861a2b 100644 --- a/tests/tx/test_tokens.py +++ b/tests/tx/test_tokens.py @@ -51,7 +51,7 @@ def test_tokens_in_block(self): self.manager.cpu_mining_service.resolve(block) with self.assertRaises(BlockWithTokensError): - self.manager.verification_service.verify(block, self.verification_params) + self.manager.verification_service.verify(block, self.get_verification_params(self.manager)) def test_tx_token_outputs(self): genesis_block = self.genesis_blocks[0] @@ -71,7 +71,7 @@ def test_tx_token_outputs(self): tx.inputs[0].data = P2PKH.create_input_data(public_bytes, signature) self.manager.cpu_mining_service.resolve(tx) with self.assertRaises(InvalidToken): - self.manager.verification_service.verify(tx, self.verification_params) + self.manager.verification_service.verify(tx, self.get_verification_params(self.manager)) # with 1 token uid in list tx.tokens = [bytes.fromhex('0023be91834c973d6a6ddd1a0ae411807b7c8ef2a015afb5177ee64b666ce602')] @@ -81,7 +81,7 @@ def test_tx_token_outputs(self): tx.inputs[0].data = P2PKH.create_input_data(public_bytes, signature) self.manager.cpu_mining_service.resolve(tx) with self.assertRaises(InvalidToken): - self.manager.verification_service.verify(tx, self.verification_params) + self.manager.verification_service.verify(tx, 
self.get_verification_params(self.manager)) # try hathor authority UTXO output = TxOutput(value, script, 0b10000000) @@ -91,7 +91,7 @@ def test_tx_token_outputs(self): tx.inputs[0].data = P2PKH.create_input_data(public_bytes, signature) self.manager.cpu_mining_service.resolve(tx) with self.assertRaises(InvalidToken): - self.manager.verification_service.verify(tx, self.verification_params) + self.manager.verification_service.verify(tx, self.get_verification_params(self.manager)) def test_token_transfer(self): wallet = self.manager.wallet @@ -112,7 +112,7 @@ def test_token_transfer(self): tx2.inputs[0].data = P2PKH.create_input_data(public_bytes, signature) self.manager.cpu_mining_service.resolve(tx2) tx2.init_static_metadata_from_storage(self._settings, self.manager.tx_storage) - self.manager.verification_service.verify(tx2, self.verification_params) + self.manager.verification_service.verify(tx2, self.get_verification_params(self.manager)) # missing tokens token_output = TxOutput(utxo.value - 1, script, 1) @@ -125,7 +125,7 @@ def test_token_transfer(self): self.manager.cpu_mining_service.resolve(tx3) tx3.init_static_metadata_from_storage(self._settings, self.manager.tx_storage) with self.assertRaises(InputOutputMismatch): - self.manager.verification_service.verify(tx3, self.verification_params) + self.manager.verification_service.verify(tx3, self.get_verification_params(self.manager)) def test_token_mint(self): wallet = self.manager.wallet @@ -192,7 +192,7 @@ def test_token_mint(self): tx3.inputs[0].data = data self.manager.cpu_mining_service.resolve(tx3) with self.assertRaises(InputOutputMismatch): - self.manager.verification_service.verify(tx3, self.verification_params) + self.manager.verification_service.verify(tx3, self.get_verification_params(self.manager)) # try to mint and deposit less tokens than necessary mint_amount = 10000000 @@ -218,7 +218,7 @@ def test_token_mint(self): tx4.inputs[1].data = data self.manager.cpu_mining_service.resolve(tx4) with 
self.assertRaises(InputOutputMismatch): - self.manager.verification_service.verify(tx4, self.verification_params) + self.manager.verification_service.verify(tx4, self.get_verification_params(self.manager)) # try to mint using melt authority UTXO _input1 = TxInput(tx.hash, 2, b'') @@ -230,7 +230,7 @@ def test_token_mint(self): tx5.inputs[0].data = P2PKH.create_input_data(public_bytes, signature) self.manager.cpu_mining_service.resolve(tx5) with self.assertRaises(InputOutputMismatch): - self.manager.verification_service.verify(tx5, self.verification_params) + self.manager.verification_service.verify(tx5, self.get_verification_params(self.manager)) def test_token_melt(self): wallet = self.manager.wallet @@ -302,7 +302,7 @@ def test_token_melt(self): tx3.inputs[1].data = data self.manager.cpu_mining_service.resolve(tx3) with self.assertRaises(InputOutputMismatch): - self.manager.verification_service.verify(tx3, self.verification_params) + self.manager.verification_service.verify(tx3, self.get_verification_params(self.manager)) # try to melt using mint authority UTXO _input1 = TxInput(tx.hash, 0, b'') @@ -318,7 +318,7 @@ def test_token_melt(self): tx4.init_static_metadata_from_storage(self._settings, self.manager.tx_storage) self.manager.cpu_mining_service.resolve(tx4) with self.assertRaises(InputOutputMismatch): - self.manager.verification_service.verify(tx4, self.verification_params) + self.manager.verification_service.verify(tx4, self.get_verification_params(self.manager)) def test_token_transfer_authority(self): wallet = self.manager.wallet @@ -337,7 +337,7 @@ def test_token_transfer_authority(self): tx2.inputs[0].data = P2PKH.create_input_data(public_bytes, signature) self.manager.cpu_mining_service.resolve(tx2) with self.assertRaises(InvalidToken): - self.manager.verification_service.verify(tx2, self.verification_params) + self.manager.verification_service.verify(tx2, self.get_verification_params(self.manager)) # input with melt and output with mint _input1 = 
TxInput(tx.hash, 2, b'') @@ -349,7 +349,7 @@ def test_token_transfer_authority(self): tx3.inputs[0].data = P2PKH.create_input_data(public_bytes, signature) self.manager.cpu_mining_service.resolve(tx3) with self.assertRaises(InvalidToken): - self.manager.verification_service.verify(tx3, self.verification_params) + self.manager.verification_service.verify(tx3, self.get_verification_params(self.manager)) def test_token_index_with_conflict(self, mint_amount=0): # create a new token and have a mint operation done. The tx that mints the @@ -453,39 +453,39 @@ def update_tx(tx): # max token name length tx.token_name = 'a' * self._settings.MAX_LENGTH_TOKEN_NAME update_tx(tx) - self.manager.verification_service.verify(tx, self.verification_params) + self.manager.verification_service.verify(tx, self.get_verification_params(self.manager)) # max token symbol length tx.token_symbol = 'a' * self._settings.MAX_LENGTH_TOKEN_SYMBOL update_tx(tx) - self.manager.verification_service.verify(tx, self.verification_params) + self.manager.verification_service.verify(tx, self.get_verification_params(self.manager)) # long token name tx.token_name = 'a' * (self._settings.MAX_LENGTH_TOKEN_NAME + 1) update_tx(tx) with self.assertRaises(TransactionDataError): - self.manager.verification_service.verify(tx, self.verification_params) + self.manager.verification_service.verify(tx, self.get_verification_params(self.manager)) # long token symbol tx.token_name = 'ValidName' tx.token_symbol = 'a' * (self._settings.MAX_LENGTH_TOKEN_SYMBOL + 1) update_tx(tx) with self.assertRaises(TransactionDataError): - self.manager.verification_service.verify(tx, self.verification_params) + self.manager.verification_service.verify(tx, self.get_verification_params(self.manager)) # Hathor token name tx.token_name = self._settings.HATHOR_TOKEN_NAME tx.token_symbol = 'TST' update_tx(tx) with self.assertRaises(TransactionDataError): - self.manager.verification_service.verify(tx, self.verification_params) + 
self.manager.verification_service.verify(tx, self.get_verification_params(self.manager)) # Hathor token symbol tx.token_name = 'Test' tx.token_symbol = self._settings.HATHOR_TOKEN_SYMBOL update_tx(tx) with self.assertRaises(TransactionDataError): - self.manager.verification_service.verify(tx, self.verification_params) + self.manager.verification_service.verify(tx, self.get_verification_params(self.manager)) # Token name unicode tx.token_name = 'Test ∞' @@ -493,7 +493,7 @@ def update_tx(tx): token_info = tx.serialize_token_info() TokenCreationTransaction.deserialize_token_info(token_info) update_tx(tx) - self.manager.verification_service.verify(tx, self.verification_params) + self.manager.verification_service.verify(tx, self.get_verification_params(self.manager)) # Token symbol unicode tx.token_name = 'Test Token' @@ -501,7 +501,7 @@ def update_tx(tx): token_info = tx.serialize_token_info() TokenCreationTransaction.deserialize_token_info(token_info) update_tx(tx) - self.manager.verification_service.verify(tx, self.verification_params) + self.manager.verification_service.verify(tx, self.get_verification_params(self.manager)) # Hathor token version tx.token_name = 'Test' @@ -509,7 +509,7 @@ def update_tx(tx): tx.token_version = TokenVersion.NATIVE update_tx(tx) with pytest.raises(TransactionDataError, match=f'Invalid token version \\({tx.token_version}\\)'): - self.manager.verification_service.verify(tx, self.verification_params) + self.manager.verification_service.verify(tx, self.get_verification_params(self.manager)) def test_token_mint_zero(self): # try to mint 0 tokens @@ -550,7 +550,7 @@ def test_unknown_authority(self): tx2.inputs[1].data = data self.manager.cpu_mining_service.resolve(tx2) with self.assertRaises(InvalidToken): - self.manager.verification_service.verify(tx2, self.verification_params) + self.manager.verification_service.verify(tx2, self.get_verification_params(self.manager)) def test_token_info_serialization(self): tx = create_tokens(self.manager, 
self.address_b58, mint_amount=500) @@ -608,7 +608,7 @@ def test_block_with_htr_authority(self): self.manager.cpu_mining_service.resolve(block) with self.assertRaises(InvalidToken): - self.manager.verification_service.verify(block, self.verification_params) + self.manager.verification_service.verify(block, self.get_verification_params(self.manager)) def test_voided_token_creation(self): tx1 = create_tokens(self.manager, self.address_b58, mint_amount=500, use_genesis=False) diff --git a/tests/tx/test_tx.py b/tests/tx/test_tx.py index 8205268e0..438724d98 100644 --- a/tests/tx/test_tx.py +++ b/tests/tx/test_tx.py @@ -78,8 +78,10 @@ def test_input_output_match_less_htr(self): public_bytes, signature = self.wallet.get_input_aux_data(data_to_sign, self.genesis_private_key) _input.data = P2PKH.create_input_data(public_bytes, signature) + best_block = self.manager.tx_storage.get_best_block() + block_storage = self.manager.get_nc_block_storage(best_block) with self.assertRaises(InputOutputMismatch): - self._verifiers.tx.verify_sum(tx.get_complete_token_info()) + self._verifiers.tx.verify_sum(self._settings, tx.get_complete_token_info(block_storage)) def test_input_output_match_more_htr(self): genesis_block = self.genesis_blocks[0] @@ -97,8 +99,10 @@ def test_input_output_match_more_htr(self): public_bytes, signature = self.wallet.get_input_aux_data(data_to_sign, self.genesis_private_key) _input.data = P2PKH.create_input_data(public_bytes, signature) + best_block = self.manager.tx_storage.get_best_block() + block_storage = self.manager.get_nc_block_storage(best_block) with self.assertRaises(InputOutputMismatch): - self._verifiers.tx.verify_sum(tx.get_complete_token_info()) + self._verifiers.tx.verify_sum(self._settings, tx.get_complete_token_info(block_storage)) def test_validation(self): # add 100 blocks and check that walking through get_next_block_best_chain yields the same blocks @@ -238,7 +242,7 @@ def test_block_inputs(self): 
self.manager.cpu_mining_service.resolve(block) with self.assertRaises(BlockWithInputs): - self.manager.verification_service.verify(block, self.verification_params) + self.manager.verification_service.verify(block, self.get_verification_params(self.manager)) def test_merge_mined_no_magic(self): from hathor.merged_mining import MAGIC_NUMBER @@ -456,21 +460,21 @@ def test_tx_number_parents(self): self.manager.cpu_mining_service.resolve(tx) tx.init_static_metadata_from_storage(self._settings, self.manager.tx_storage) with self.assertRaises(IncorrectParents): - self.manager.verification_service.verify(tx, self.verification_params) + self.manager.verification_service.verify(tx, self.get_verification_params(self.manager)) # test with 3 parents parents = [tx.hash for tx in self.genesis] tx.parents = parents self.manager.cpu_mining_service.resolve(tx) with self.assertRaises(IncorrectParents): - self.manager.verification_service.verify(tx, self.verification_params) + self.manager.verification_service.verify(tx, self.get_verification_params(self.manager)) # 2 parents, 1 tx and 1 block parents = [self.genesis_txs[0].hash, self.genesis_blocks[0].hash] tx.parents = parents self.manager.cpu_mining_service.resolve(tx) with self.assertRaises(IncorrectParents): - self.manager.verification_service.verify(tx, self.verification_params) + self.manager.verification_service.verify(tx, self.get_verification_params(self.manager)) def test_block_unknown_parent(self): address = get_address_from_public_key(self.genesis_public_key) @@ -489,7 +493,7 @@ def test_block_unknown_parent(self): self.manager.cpu_mining_service.resolve(block) with self.assertRaises(ParentDoesNotExist): - self.manager.verification_service.verify(block, self.verification_params) + self.manager.verification_service.verify(block, self.get_verification_params(self.manager)) def test_block_number_parents(self): address = get_address_from_public_key(self.genesis_public_key) @@ -507,7 +511,7 @@ def 
test_block_number_parents(self): self.manager.cpu_mining_service.resolve(block) with self.assertRaises(IncorrectParents): - self.manager.verification_service.verify(block, self.verification_params) + self.manager.verification_service.verify(block, self.get_verification_params(self.manager)) def test_tx_inputs_out_of_range(self): # we'll try to spend output 3 from genesis transaction, which does not exist @@ -530,7 +534,7 @@ def test_tx_inputs_out_of_range(self): # test with an inexistent index self.manager.cpu_mining_service.resolve(tx) with self.assertRaises(InexistentInput): - self.manager.verification_service.verify(tx, self.verification_params) + self.manager.verification_service.verify(tx, self.get_verification_params(self.manager)) # now with index equals of len of outputs _input = [TxInput(genesis_block.hash, len(genesis_block.outputs), data)] @@ -538,7 +542,7 @@ def test_tx_inputs_out_of_range(self): # test with an inexistent index self.manager.cpu_mining_service.resolve(tx) with self.assertRaises(InexistentInput): - self.manager.verification_service.verify(tx, self.verification_params) + self.manager.verification_service.verify(tx, self.get_verification_params(self.manager)) # now with inexistent tx hash random_bytes = bytes.fromhex('0000184e64683b966b4268f387c269915cc61f6af5329823a93e3696cb0fe902') @@ -546,7 +550,7 @@ def test_tx_inputs_out_of_range(self): tx.inputs = _input self.manager.cpu_mining_service.resolve(tx) with self.assertRaises(InexistentInput): - self.manager.verification_service.verify(tx, self.verification_params) + self.manager.verification_service.verify(tx, self.get_verification_params(self.manager)) def test_tx_inputs_conflict(self): # the new tx inputs will try to spend the same output @@ -569,7 +573,7 @@ def test_tx_inputs_conflict(self): self.manager.cpu_mining_service.resolve(tx) with self.assertRaises(ConflictingInputs): - self.manager.verification_service.verify(tx, self.verification_params) + 
self.manager.verification_service.verify(tx, self.get_verification_params(self.manager)) def test_regular_tx(self): # this should succeed @@ -591,7 +595,7 @@ def test_regular_tx(self): self.manager.cpu_mining_service.resolve(tx) tx.init_static_metadata_from_storage(self._settings, self.manager.tx_storage) - self.manager.verification_service.verify(tx, self.verification_params) + self.manager.verification_service.verify(tx, self.get_verification_params(self.manager)) def test_tx_weight_too_high(self): parents = [tx.hash for tx in self.genesis_txs] @@ -626,7 +630,7 @@ def test_weight_nan(self): tx.update_hash() self.assertTrue(isnan(tx.weight)) with self.assertRaises(WeightError): - self.manager.verification_service.verify(tx, self.verification_params) + self.manager.verification_service.verify(tx, self.get_verification_params(self.manager)) def test_weight_inf(self): # this should succeed @@ -649,7 +653,7 @@ def test_weight_inf(self): tx.update_hash() self.assertTrue(isinf(tx.weight)) with self.assertRaises(WeightError): - self.manager.verification_service.verify(tx, self.verification_params) + self.manager.verification_service.verify(tx, self.get_verification_params(self.manager)) def test_tx_duplicated_parents(self): # the new tx will confirm the same tx twice @@ -672,7 +676,7 @@ def test_tx_duplicated_parents(self): self.manager.cpu_mining_service.resolve(tx) tx.init_static_metadata_from_storage(self._settings, self.manager.tx_storage) with self.assertRaises(DuplicatedParents): - self.manager.verification_service.verify(tx, self.verification_params) + self.manager.verification_service.verify(tx, self.get_verification_params(self.manager)) def test_update_timestamp(self): parents = [tx for tx in self.genesis_txs] @@ -880,7 +884,7 @@ def test_output_value(self): # 'Manually resolving', to validate verify method tx.hash = bytes.fromhex('012cba011be3c29f1c406f9015e42698b97169dbc6652d1f5e4d5c5e83138858') with self.assertRaises(InvalidOutputValue): - 
self.manager.verification_service.verify(tx, self.verification_params) + self.manager.verification_service.verify(tx, self.get_verification_params(self.manager)) # Invalid output value invalid_output = bytes.fromhex('ffffffff') @@ -1116,7 +1120,7 @@ def test_sigops_output_single_above_limit(self) -> None: tx.update_hash() # This calls verify to ensure that verify_sigops_output is being called on verify with self.assertRaises(TooManySigOps): - self.manager.verification_service.verify(tx, self.verification_params) + self.manager.verification_service.verify(tx, self.get_verification_params(self.manager)) def test_sigops_output_multi_above_limit(self) -> None: genesis_block = self.genesis_blocks[0] @@ -1129,7 +1133,7 @@ def test_sigops_output_multi_above_limit(self) -> None: tx = Transaction(inputs=[_input], outputs=[output2]*num_outputs, storage=self.tx_storage) tx.update_hash() with self.assertRaises(TooManySigOps): - self.manager.verification_service.verify(tx, self.verification_params) + self.manager.verification_service.verify(tx, self.get_verification_params(self.manager)) def test_sigops_output_single_below_limit(self) -> None: genesis_block = self.genesis_blocks[0] @@ -1166,7 +1170,7 @@ def test_sigops_input_single_above_limit(self) -> None: tx = Transaction(inputs=[input1], outputs=[_output], storage=self.tx_storage) tx.update_hash() with self.assertRaises(TooManySigOps): - self.manager.verification_service.verify(tx, self.verification_params) + self.manager.verification_service.verify(tx, self.get_verification_params(self.manager)) def test_sigops_input_multi_above_limit(self) -> None: genesis_block = self.genesis_blocks[0] @@ -1181,7 +1185,7 @@ def test_sigops_input_multi_above_limit(self) -> None: tx = Transaction(inputs=[input2]*num_inputs, outputs=[_output], storage=self.tx_storage) tx.update_hash() with self.assertRaises(TooManySigOps): - self.manager.verification_service.verify(tx, self.verification_params) + self.manager.verification_service.verify(tx, 
self.get_verification_params(self.manager)) def test_sigops_input_single_below_limit(self) -> None: genesis_block = self.genesis_blocks[0] diff --git a/tests/tx/test_tx_deserialization.py b/tests/tx/test_tx_deserialization.py index 9e1430cb6..38f64f4ac 100644 --- a/tests/tx/test_tx_deserialization.py +++ b/tests/tx/test_tx_deserialization.py @@ -35,7 +35,7 @@ def verbose(key, value): cls = self.get_tx_class() tx = cls.create_from_struct(self.tx_bytes, verbose=verbose) - self._verification_service.verify_without_storage(tx, self.verification_params) + self._verification_service.verify_without_storage(tx, self.get_verification_params()) key, version = v[1] self.assertEqual(key, 'version') diff --git a/tests/tx/test_tx_storage.py b/tests/tx/test_tx_storage.py index 5eb5bd51e..84bd53948 100644 --- a/tests/tx/test_tx_storage.py +++ b/tests/tx/test_tx_storage.py @@ -5,7 +5,6 @@ from twisted.internet.defer import gatherResults, inlineCallbacks from twisted.internet.threads import deferToThread -from twisted.trial import unittest from hathor.daa import TestMode from hathor.simulator.utils import add_new_blocks @@ -13,7 +12,7 @@ from hathor.transaction.scripts import P2PKH from hathor.transaction.storage.exceptions import TransactionDoesNotExist from hathor.transaction.validation_state import ValidationState -from hathor.verification.verification_params import VerificationParams +from tests import unittest from tests.unittest import TestBuilder from tests.utils import BURN_ADDRESS, add_blocks_unlock_reward, add_new_transactions, add_new_tx, create_tokens @@ -22,6 +21,7 @@ class BaseTransactionStorageTest(unittest.TestCase): __test__ = False def setUp(self): + super().setUp() self.tmpdir = tempfile.mkdtemp() builder = TestBuilder() @@ -37,7 +37,6 @@ def setUp(self): self.manager = artifacts.manager self.tx_storage = artifacts.tx_storage self._settings = artifacts.settings - self.verification_params = VerificationParams.default_for_mempool() assert artifacts.wallet is not 
None @@ -57,7 +56,7 @@ def setUp(self): nonce=100781, storage=self.tx_storage) self.manager.cpu_mining_service.resolve(self.block) self.block.init_static_metadata_from_storage(self._settings, self.tx_storage) - self.manager.verification_service.verify(self.block, self.verification_params) + self.manager.verification_service.verify(self.block, self.get_verification_params(self.manager)) self.block.get_metadata().validation = ValidationState.FULL tx_parents = [tx.hash for tx in self.genesis_txs] @@ -98,7 +97,7 @@ def test_genesis(self): self.assertEqual(1, len(self.genesis_blocks)) self.assertEqual(2, len(self.genesis_txs)) for tx in self.genesis: - self.manager.verification_service.verify(tx, self.verification_params) + self.manager.verification_service.verify(tx, self.get_verification_params(self.manager)) for tx in self.genesis: tx2 = self.tx_storage.get_transaction(tx.hash) diff --git a/tests/tx/test_verification.py b/tests/tx/test_verification.py index b63ab4e5c..4dc173dbb 100644 --- a/tests/tx/test_verification.py +++ b/tests/tx/test_verification.py @@ -124,7 +124,7 @@ def test_block_verify_basic(self) -> None: patch.object(BlockVerifier, 'verify_weight', verify_weight_wrapped), patch.object(BlockVerifier, 'verify_reward', verify_reward_wrapped), ): - self.manager.verification_service.verify_basic(block, self.verification_params) + self.manager.verification_service.verify_basic(block, self.get_verification_params(self.manager)) # Vertex methods verify_version_basic_wrapped.assert_called_once() @@ -154,7 +154,7 @@ def test_block_verify_without_storage(self) -> None: patch.object(BlockVerifier, 'verify_data', verify_data_wrapped), patch.object(VertexVerifier, 'verify_sigops_output', verify_sigops_output_wrapped), ): - self.manager.verification_service.verify_without_storage(block, self.verification_params) + self.manager.verification_service.verify_without_storage(block, self.get_verification_params(self.manager)) # Vertex methods 
verify_outputs_wrapped.assert_called_once() @@ -196,7 +196,7 @@ def test_block_verify(self) -> None: patch.object(BlockVerifier, 'verify_height', verify_height_wrapped), patch.object(BlockVerifier, 'verify_mandatory_signaling', verify_mandatory_signaling_wrapped), ): - self.manager.verification_service.verify(block, self.verification_params) + self.manager.verification_service.verify(block, self.get_verification_params(self.manager)) # Vertex methods verify_outputs_wrapped.assert_called_once() @@ -226,7 +226,7 @@ def test_block_validate_basic(self) -> None: patch.object(BlockVerifier, 'verify_weight', verify_weight_wrapped), patch.object(BlockVerifier, 'verify_reward', verify_reward_wrapped), ): - self.manager.verification_service.validate_basic(block, self.verification_params) + self.manager.verification_service.validate_basic(block, self.get_verification_params(self.manager)) # Vertex methods verify_version_basic_wrapped.assert_called_once() @@ -239,7 +239,7 @@ def test_block_validate_basic(self) -> None: self.assertEqual(block.get_metadata().validation, ValidationState.BASIC) # full validation should still pass and the validation updated to FULL - self.manager.verification_service.validate_full(block, self.verification_params) + self.manager.verification_service.validate_full(block, self.get_verification_params(self.manager)) self.assertEqual(block.get_metadata().validation, ValidationState.FULL) # and if running basic validation again it shouldn't validate or change the validation state @@ -250,7 +250,7 @@ def test_block_validate_basic(self) -> None: patch.object(BlockVerifier, 'verify_weight', verify_weight_wrapped2), patch.object(BlockVerifier, 'verify_reward', verify_reward_wrapped2), ): - self.manager.verification_service.validate_basic(block, self.verification_params) + self.manager.verification_service.validate_basic(block, self.get_verification_params(self.manager)) # Block methods verify_weight_wrapped2.assert_not_called() @@ -294,7 +294,7 @@ def 
test_block_validate_full(self) -> None: patch.object(BlockVerifier, 'verify_reward', verify_reward_wrapped), patch.object(BlockVerifier, 'verify_mandatory_signaling', verify_mandatory_signaling_wrapped), ): - self.manager.verification_service.validate_full(block, self.verification_params) + self.manager.verification_service.validate_full(block, self.get_verification_params(self.manager)) # Vertex methods verify_version_basic_wrapped.assert_called_once() @@ -327,7 +327,7 @@ def test_merge_mined_block_verify_basic(self) -> None: patch.object(BlockVerifier, 'verify_weight', verify_weight_wrapped), patch.object(BlockVerifier, 'verify_reward', verify_reward_wrapped), ): - self.manager.verification_service.verify_basic(block, self.verification_params) + self.manager.verification_service.verify_basic(block, self.get_verification_params(self.manager)) # Vertex methods verify_version_basic_wrapped.assert_called_once() @@ -357,7 +357,7 @@ def test_merge_mined_block_verify_without_storage(self) -> None: patch.object(BlockVerifier, 'verify_data', verify_data_wrapped), patch.object(VertexVerifier, 'verify_sigops_output', verify_sigops_output_wrapped), ): - self.manager.verification_service.verify_without_storage(block, self.verification_params) + self.manager.verification_service.verify_without_storage(block, self.get_verification_params(self.manager)) # Vertex methods verify_outputs_wrapped.assert_called_once() @@ -402,7 +402,7 @@ def test_merge_mined_block_verify(self) -> None: patch.object(BlockVerifier, 'verify_mandatory_signaling', verify_mandatory_signaling_wrapped), patch.object(MergeMinedBlockVerifier, 'verify_aux_pow', verify_aux_pow_wrapped), ): - self.manager.verification_service.verify(block, self.verification_params) + self.manager.verification_service.verify(block, self.get_verification_params(self.manager)) # Vertex methods verify_outputs_wrapped.assert_called_once() @@ -435,7 +435,7 @@ def test_merge_mined_block_validate_basic(self) -> None: 
patch.object(BlockVerifier, 'verify_weight', verify_weight_wrapped), patch.object(BlockVerifier, 'verify_reward', verify_reward_wrapped), ): - self.manager.verification_service.validate_basic(block, self.verification_params) + self.manager.verification_service.validate_basic(block, self.get_verification_params(self.manager)) # Vertex methods verify_version_basic_wrapped.assert_called_once() @@ -448,7 +448,7 @@ def test_merge_mined_block_validate_basic(self) -> None: self.assertEqual(block.get_metadata().validation, ValidationState.BASIC) # full validation should still pass and the validation updated to FULL - self.manager.verification_service.validate_full(block, self.verification_params) + self.manager.verification_service.validate_full(block, self.get_verification_params(self.manager)) self.assertEqual(block.get_metadata().validation, ValidationState.FULL) # and if running basic validation again it shouldn't validate or change the validation state @@ -459,7 +459,7 @@ def test_merge_mined_block_validate_basic(self) -> None: patch.object(BlockVerifier, 'verify_weight', verify_weight_wrapped2), patch.object(BlockVerifier, 'verify_reward', verify_reward_wrapped2), ): - self.manager.verification_service.validate_basic(block, self.verification_params) + self.manager.verification_service.validate_basic(block, self.get_verification_params(self.manager)) # Block methods verify_weight_wrapped2.assert_not_called() @@ -506,7 +506,7 @@ def test_merge_mined_block_validate_full(self) -> None: patch.object(BlockVerifier, 'verify_mandatory_signaling', verify_mandatory_signaling_wrapped), patch.object(MergeMinedBlockVerifier, 'verify_aux_pow', verify_aux_pow_wrapped), ): - self.manager.verification_service.validate_full(block, self.verification_params) + self.manager.verification_service.validate_full(block, self.get_verification_params(self.manager)) # Vertex methods verify_version_basic_wrapped.assert_called_once() @@ -554,7 +554,7 @@ def test_transaction_verify_basic(self) -> 
None: patch.object(VertexVerifier, 'verify_number_of_outputs', verify_number_of_outputs_wrapped), patch.object(VertexVerifier, 'verify_sigops_output', verify_sigops_output_wrapped), ): - self.manager.verification_service.verify_basic(tx, self.verification_params) + self.manager.verification_service.verify_basic(tx, self.get_verification_params(self.manager)) # Vertex methods verify_version_basic_wrapped.assert_called_once() @@ -588,7 +588,7 @@ def test_transaction_verify_without_storage(self) -> None: patch.object(VertexVerifier, 'verify_number_of_outputs', verify_number_of_outputs_wrapped), patch.object(VertexVerifier, 'verify_sigops_output', verify_sigops_output_wrapped), ): - self.manager.verification_service.verify_without_storage(tx, self.verification_params) + self.manager.verification_service.verify_without_storage(tx, self.get_verification_params(self.manager)) # Vertex methods verify_outputs_wrapped.assert_called_once() @@ -636,7 +636,7 @@ def test_transaction_verify(self) -> None: patch.object(TransactionVerifier, 'verify_reward_locked', verify_reward_locked_wrapped), patch.object(TransactionVerifier, 'verify_version', verify_tx_version_wrapped), ): - self.manager.verification_service.verify(tx, self.verification_params) + self.manager.verification_service.verify(tx, self.get_verification_params(self.manager)) # Vertex methods verify_outputs_wrapped.assert_called_once() @@ -683,7 +683,7 @@ def test_transaction_validate_basic(self) -> None: patch.object(VertexVerifier, 'verify_number_of_outputs', verify_number_of_outputs_wrapped), patch.object(VertexVerifier, 'verify_sigops_output', verify_sigops_output_wrapped), ): - self.manager.verification_service.validate_basic(tx, self.verification_params) + self.manager.verification_service.validate_basic(tx, self.get_verification_params(self.manager)) # Vertex methods verify_version_basic_wrapped.assert_called_once() @@ -702,7 +702,7 @@ def test_transaction_validate_basic(self) -> None: 
self.assertEqual(tx.get_metadata().validation, ValidationState.BASIC) # full validation should still pass and the validation updated to FULL - self.manager.verification_service.validate_full(tx, self.verification_params) + self.manager.verification_service.validate_full(tx, self.get_verification_params(self.manager)) self.assertEqual(tx.get_metadata().validation, ValidationState.FULL) # and if running basic validation again it shouldn't validate or change the validation state @@ -723,7 +723,7 @@ def test_transaction_validate_basic(self) -> None: patch.object(VertexVerifier, 'verify_number_of_outputs', verify_number_of_outputs_wrapped2), patch.object(VertexVerifier, 'verify_sigops_output', verify_sigops_output_wrapped2), ): - self.manager.verification_service.validate_basic(tx, self.verification_params) + self.manager.verification_service.validate_basic(tx, self.get_verification_params(self.manager)) # Transaction methods verify_parents_basic_wrapped2.assert_not_called() @@ -779,7 +779,7 @@ def test_transaction_validate_full(self) -> None: patch.object(TransactionVerifier, 'verify_reward_locked', verify_reward_locked_wrapped), patch.object(TransactionVerifier, 'verify_version', verify_tx_version_wrapped), ): - self.manager.verification_service.validate_full(tx, self.verification_params) + self.manager.verification_service.validate_full(tx, self.get_verification_params(self.manager)) # Vertex methods verify_version_basic_wrapped.assert_called_once() @@ -823,7 +823,7 @@ def test_transaction_validate_full(self) -> None: patch.object(VertexVerifier, 'verify_number_of_outputs', verify_number_of_outputs_wrapped2), patch.object(VertexVerifier, 'verify_sigops_output', verify_sigops_output_wrapped2), ): - self.manager.verification_service.validate_basic(tx, self.verification_params) + self.manager.verification_service.validate_basic(tx, self.get_verification_params(self.manager)) # Transaction methods verify_parents_basic_wrapped2.assert_not_called() @@ -862,7 +862,7 @@ def 
test_token_creation_transaction_verify_basic(self) -> None: patch.object(VertexVerifier, 'verify_number_of_outputs', verify_number_of_outputs_wrapped), patch.object(VertexVerifier, 'verify_sigops_output', verify_sigops_output_wrapped), ): - self.manager.verification_service.verify_basic(tx, self.verification_params) + self.manager.verification_service.verify_basic(tx, self.get_verification_params(self.manager)) # Vertex methods verify_version_basic_wrapped.assert_called_once() @@ -896,7 +896,7 @@ def test_token_creation_transaction_verify_without_storage(self) -> None: patch.object(VertexVerifier, 'verify_number_of_outputs', verify_number_of_outputs_wrapped), patch.object(VertexVerifier, 'verify_sigops_output', verify_sigops_output_wrapped), ): - self.manager.verification_service.verify_without_storage(tx, self.verification_params) + self.manager.verification_service.verify_without_storage(tx, self.get_verification_params(self.manager)) # Vertex methods verify_outputs_wrapped.assert_called_once() @@ -948,7 +948,7 @@ def test_token_creation_transaction_verify(self) -> None: patch.object(TokenCreationTransactionVerifier, 'verify_token_info', verify_token_info_wrapped), patch.object(TokenCreationTransactionVerifier, 'verify_minted_tokens', verify_minted_tokens_wrapped), ): - self.manager.verification_service.verify(tx, self.verification_params) + self.manager.verification_service.verify(tx, self.get_verification_params(self.manager)) # Vertex methods verify_outputs_wrapped.assert_called_once() @@ -998,7 +998,7 @@ def test_token_creation_transaction_validate_basic(self) -> None: patch.object(VertexVerifier, 'verify_number_of_outputs', verify_number_of_outputs_wrapped), patch.object(VertexVerifier, 'verify_sigops_output', verify_sigops_output_wrapped), ): - self.manager.verification_service.validate_basic(tx, self.verification_params) + self.manager.verification_service.validate_basic(tx, self.get_verification_params(self.manager)) # Vertex methods 
verify_version_basic_wrapped.assert_called_once() @@ -1018,7 +1018,7 @@ def test_token_creation_transaction_validate_basic(self) -> None: # full validation should still pass and the validation updated to FULL with tx_allow_context(self.manager.tx_storage, allow_scope=TxAllowScope.PARTIAL | TxAllowScope.VALID): - self.manager.verification_service.validate_full(tx, self.verification_params) + self.manager.verification_service.validate_full(tx, self.get_verification_params(self.manager)) self.assertEqual(tx.get_metadata().validation, ValidationState.FULL) # and if running basic validation again it shouldn't validate or change the validation state @@ -1039,7 +1039,7 @@ def test_token_creation_transaction_validate_basic(self) -> None: patch.object(VertexVerifier, 'verify_number_of_outputs', verify_number_of_outputs_wrapped2), patch.object(VertexVerifier, 'verify_sigops_output', verify_sigops_output_wrapped2), ): - self.manager.verification_service.validate_basic(tx, self.verification_params) + self.manager.verification_service.validate_basic(tx, self.get_verification_params(self.manager)) # Transaction methods verify_parents_basic_wrapped2.assert_not_called() @@ -1100,7 +1100,7 @@ def test_token_creation_transaction_validate_full(self) -> None: patch.object(TokenCreationTransactionVerifier, 'verify_token_info', verify_token_info_wrapped), patch.object(TokenCreationTransactionVerifier, 'verify_minted_tokens', verify_minted_tokens_wrapped), ): - self.manager.verification_service.validate_full(tx, self.verification_params) + self.manager.verification_service.validate_full(tx, self.get_verification_params(self.manager)) # Vertex methods verify_version_basic_wrapped.assert_called_once() diff --git a/tests/unittest.py b/tests/unittest.py index 7617d6928..53264ea10 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -8,6 +8,7 @@ from contextlib import contextmanager from typing import Any, Callable, Collection, Iterable, Iterator, Optional from unittest import main as 
ut_main +from unittest.mock import Mock from structlog import get_logger from twisted.trial import unittest @@ -119,7 +120,6 @@ def setUp(self) -> None: self.rng = Random(self.seed) self._pending_cleanups: list[Callable[..., Any]] = [] self._settings = get_global_settings() - self.verification_params = VerificationParams.default_for_mempool() def tearDown(self) -> None: self.clean_tmpdirs() @@ -521,3 +521,8 @@ def get_address(self, index: int) -> Optional[str]: return None return list(hd.keys.keys())[index] + + @staticmethod + def get_verification_params(manager: HathorManager | None = None) -> VerificationParams: + best_block = manager.tx_storage.get_best_block() if manager else None + return VerificationParams.default_for_mempool(best_block=best_block or Mock()) diff --git a/tests/utils.py b/tests/utils.py index 1abfe221a..ed7cdd815 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -22,12 +22,13 @@ from hathor.mining.cpu_mining_service import CpuMiningService from hathor.simulator.utils import add_new_block, add_new_blocks, gen_new_double_spending, gen_new_tx from hathor.transaction import BaseTransaction, Block, Transaction, TxInput, TxOutput +from hathor.transaction.headers import FeeHeader +from hathor.transaction.headers.fee_header import FeeHeaderEntry from hathor.transaction.scripts import P2PKH, HathorScript, Opcode, parse_address_script from hathor.transaction.token_creation_tx import TokenCreationTransaction from hathor.transaction.token_info import TokenVersion from hathor.transaction.util import get_deposit_token_deposit_amount from hathor.util import Random -from hathor.verification.verification_params import VerificationParams settings = HathorSettings() @@ -603,6 +604,12 @@ def create_fee_tokens(manager: 'HathorManager', address_b58: Optional[str] = Non timestamp=timestamp, token_version=TokenVersion.FEE ) + + tx.headers.append(FeeHeader( + settings=manager._settings, + tx=tx, + fees=[FeeHeaderEntry(token_index=0, amount=fee)]) + ) data_to_sign = 
tx.get_sighash_all() public_bytes, signature = wallet.get_input_aux_data(data_to_sign, genesis_private_key) @@ -720,8 +727,6 @@ def add_tx_with_data_script(manager: 'HathorManager', data: list[str], propagate manager.cpu_mining_service.resolve(tx) if propagate: - params = VerificationParams.default_for_mempool() - manager.verification_service.verify(tx, params) manager.propagate_tx(tx) assert isinstance(manager.reactor, Clock) manager.reactor.advance(8) diff --git a/tests/wallet/test_wallet.py b/tests/wallet/test_wallet.py index 73d175680..535904afc 100644 --- a/tests/wallet/test_wallet.py +++ b/tests/wallet/test_wallet.py @@ -207,7 +207,7 @@ def test_create_token_transaction(self): tx2.parents = self.manager.get_new_tx_parents() self.manager.cpu_mining_service.resolve(tx2) tx2.init_static_metadata_from_storage(self._settings, self.manager.tx_storage) - self.manager.verification_service.verify(tx2, self.verification_params) + self.manager.verification_service.verify(tx2, self.get_verification_params(self.manager)) self.assertNotEqual(len(tx2.inputs), 0) token_dict = defaultdict(int) diff --git a/tests/wallet/test_wallet_hd.py b/tests/wallet/test_wallet_hd.py index e973f89a8..dae63f102 100644 --- a/tests/wallet/test_wallet_hd.py +++ b/tests/wallet/test_wallet_hd.py @@ -27,7 +27,7 @@ def test_transaction_and_balance(self): new_address = self.wallet.get_unused_address() out = WalletOutputInfo(decode_address(new_address), self.TOKENS, timelock=None) block = add_new_block(self.manager) - self.manager.verification_service.verify(block, self.verification_params) + self.manager.verification_service.verify(block, self.get_verification_params(self.manager)) utxo = self.wallet.unspent_txs[self._settings.HATHOR_TOKEN_UID].get((block.hash, 0)) self.assertIsNotNone(utxo) self.assertEqual(self.wallet.balance[self._settings.HATHOR_TOKEN_UID], WalletBalance(0, self.BLOCK_TOKENS))