diff --git a/hathor/indexes/manager.py b/hathor/indexes/manager.py index e686136a1..2e6211638 100644 --- a/hathor/indexes/manager.py +++ b/hathor/indexes/manager.py @@ -20,6 +20,7 @@ from typing import TYPE_CHECKING, Iterator, Optional from structlog import get_logger +from typing_extensions import assert_never from hathor.indexes.address_index import AddressIndex from hathor.indexes.base_index import BaseIndex @@ -35,6 +36,7 @@ from hathor.indexes.tokens_index import TokensIndex from hathor.indexes.utxo_index import UtxoIndex from hathor.transaction import BaseTransaction +from hathor.transaction.nc_execution_state import NCExecutionState from hathor.util import tx_progress if TYPE_CHECKING: # pragma: no cover @@ -204,11 +206,141 @@ def _manually_initialize(self, tx_storage: 'TransactionStorage') -> None: def update(self, tx: BaseTransaction) -> None: """ This is the new update method that indexes should use instead of add_tx/del_tx """ + self.nc_update_add(tx) + # XXX: this _should_ be here, but it breaks some tests, for now this is done explicitly in hathor.manager # self.mempool_tips.update(tx) if self.utxo: self.utxo.update(tx) + def nc_update_add(self, tx: BaseTransaction) -> None: + from hathor.conf.settings import HATHOR_TOKEN_UID + from hathor.nanocontracts.runner.types import ( + NCSyscallRecord, + SyscallCreateContractRecord, + SyscallUpdateTokensRecord, + ) + from hathor.nanocontracts.types import ContractId + from hathor.transaction.nc_execution_state import NCExecutionState + + if not tx.is_nano_contract(): + return + + meta = tx.get_metadata() + if meta.nc_execution != NCExecutionState.SUCCESS: + return + + assert meta.nc_calls + first_call = meta.nc_calls[0] + nc_syscalls: list[NCSyscallRecord] = [] + + # Add to indexes. + for call in meta.nc_calls: + # Txs that call other contracts are added to those contracts' history. This includes calls to `initialize`. 
+ if self.nc_history: + self.nc_history.add_single_key(call.contract_id, tx) + + # Accumulate all syscalls. + nc_syscalls.extend(call.index_updates) + + created_contracts: set[ContractId] = set() + for syscall in nc_syscalls: + match syscall: + case SyscallCreateContractRecord(blueprint_id=blueprint_id, contract_id=contract_id): + assert contract_id not in created_contracts, f'contract {contract_id.hex()} created multiple times' + assert contract_id != first_call.contract_id, ( + f'contract {contract_id.hex()} cannot make a syscall to create itself' + ) + created_contracts.add(contract_id) + + # Txs that create other contracts are added to the NC creation index and blueprint index. + # They're already added to the NC history index, above. + if self.nc_creation: + self.nc_creation.manually_add_tx(tx) + + if self.blueprint_history: + self.blueprint_history.add_single_key(blueprint_id, tx) + + case SyscallUpdateTokensRecord(): + # Minted/melted tokens are added/removed to/from the tokens index, + # and the respective destroyed/created HTR too. + if self.tokens: + try: + self.tokens.get_token_info(syscall.token_uid) + except KeyError: + # If the token doesn't exist in the index yet, it must be a token creation syscall. 
+ from hathor.nanocontracts.runner.types import SyscallRecordType + assert syscall.type is SyscallRecordType.CREATE_TOKEN, syscall.type + assert syscall.token_name is not None and syscall.token_symbol is not None + self.tokens.create_token_info(syscall.token_uid, syscall.token_name, syscall.token_symbol) + + self.tokens.add_to_total(syscall.token_uid, syscall.token_amount) + self.tokens.add_to_total(HATHOR_TOKEN_UID, syscall.htr_amount) + + case _: + assert_never(syscall) + + def nc_update_remove(self, tx: BaseTransaction) -> None: + from hathor.conf.settings import HATHOR_TOKEN_UID + from hathor.nanocontracts.runner.types import ( + NCSyscallRecord, + SyscallCreateContractRecord, + SyscallUpdateTokensRecord, + ) + from hathor.nanocontracts.types import NC_INITIALIZE_METHOD, ContractId + + if not tx.is_nano_contract(): + return + + meta = tx.get_metadata() + assert meta.nc_execution is NCExecutionState.SUCCESS + assert meta.nc_calls + first_call = meta.nc_calls[0] + nc_syscalls: list[NCSyscallRecord] = [] + + # Remove from indexes, but we must keep the first call's contract still in the indexes. + for call in meta.nc_calls: + # Remove from nc_history except where it's the same contract as the first call. + if self.nc_history and call.contract_id != first_call.contract_id: + self.nc_history.remove_single_key(call.contract_id, tx) + + # Accumulate all syscalls. 
+ nc_syscalls.extend(call.index_updates) + + created_contracts: set[ContractId] = set() + for syscall in nc_syscalls: + match syscall: + case SyscallCreateContractRecord(blueprint_id=blueprint_id, contract_id=contract_id): + assert contract_id not in created_contracts, f'contract {contract_id.hex()} created multiple times' + assert contract_id != first_call.contract_id, ( + f'contract {contract_id.hex()} cannot make a syscall to create itself' + ) + created_contracts.add(contract_id) + + # Remove only when the first call is not creating a contract, that is, + # if the tx itself is a nc creation, it must be kept in the indexes. + if first_call.method_name != NC_INITIALIZE_METHOD: + # Remove from nc_creation. + if self.nc_creation: + self.nc_creation.del_tx(tx) + + # Remove from blueprint_history. + if self.blueprint_history: + self.blueprint_history.remove_single_key(blueprint_id, tx) + + case SyscallUpdateTokensRecord(): + # Undo the tokens update. + if self.tokens: + self.tokens.add_to_total(syscall.token_uid, -syscall.token_amount) + self.tokens.add_to_total(HATHOR_TOKEN_UID, -syscall.htr_amount) + + from hathor.nanocontracts.runner.types import SyscallRecordType + if syscall.type is SyscallRecordType.CREATE_TOKEN: + self.tokens.destroy_token(syscall.token_uid) + + case _: + assert_never(syscall) + def add_tx(self, tx: BaseTransaction) -> bool: """ Add a transaction to the indexes diff --git a/hathor/indexes/rocksdb_tokens_index.py b/hathor/indexes/rocksdb_tokens_index.py index 72e85ef91..997912b91 100644 --- a/hathor/indexes/rocksdb_tokens_index.py +++ b/hathor/indexes/rocksdb_tokens_index.py @@ -17,6 +17,7 @@ from typing import TYPE_CHECKING, Iterator, NamedTuple, Optional, TypedDict, cast from structlog import get_logger +from typing_extensions import assert_never, override from hathor.conf.settings import HathorSettings from hathor.indexes.rocksdb_utils import ( @@ -27,6 +28,12 @@ to_internal_token_uid, ) from hathor.indexes.tokens_index import 
TokenIndexInfo, TokensIndex, TokenUtxoInfo +from hathor.nanocontracts.types import ( + NCAcquireAuthorityAction, + NCDepositAction, + NCGrantAuthorityAction, + NCWithdrawalAction, +) from hathor.transaction import BaseTransaction, Transaction from hathor.transaction.base_transaction import TxVersion from hathor.util import collect_n, json_dumpb, json_loadb @@ -168,7 +175,7 @@ def _to_value_info(self, info: _InfoDict) -> bytes: def _from_value_info(self, value: bytes) -> _InfoDict: return cast(_InfoDict, json_loadb(value)) - def _create_token_info(self, token_uid: bytes, name: str, symbol: str, total: int = 0) -> None: + def create_token_info(self, token_uid: bytes, name: str, symbol: str, total: int = 0) -> None: key = self._to_key_info(token_uid) old_value = self._db.get((self._cf, key)) assert old_value is None @@ -179,7 +186,7 @@ def _create_token_info(self, token_uid: bytes, name: str, symbol: str, total: in }) self._db.put((self._cf, key), value) - def _destroy_token(self, token_uid: bytes) -> None: + def destroy_token(self, token_uid: bytes) -> None: import rocksdb # a writebatch works similar to a "SQL transaction" in that if it fails, either all persist or none @@ -218,14 +225,15 @@ def _remove_authority_utxo(self, token_uid: bytes, tx_hash: bytes, index: int, * self._db.delete((self._cf, self._to_key_authority(token_uid, TokenUtxoInfo(tx_hash, index), is_mint=is_mint))) def _create_genesis_info(self) -> None: - self._create_token_info( + self.create_token_info( self._settings.HATHOR_TOKEN_UID, self._settings.HATHOR_TOKEN_NAME, self._settings.HATHOR_TOKEN_SYMBOL, self._settings.GENESIS_TOKENS, ) - def _add_to_total(self, token_uid: bytes, amount: int) -> None: + @override + def add_to_total(self, token_uid: bytes, amount: int) -> None: key_info = self._to_key_info(token_uid) old_value_info = self._db.get((self._cf, key_info)) if token_uid == self._settings.HATHOR_TOKEN_UID and old_value_info is None: @@ -237,18 +245,6 @@ def _add_to_total(self, token_uid: 
bytes, amount: int) -> None: new_value_info = self._to_value_info(dict_info) self._db.put((self._cf, key_info), new_value_info) - def _subtract_from_total(self, token_uid: bytes, amount: int) -> None: - key_info = self._to_key_info(token_uid) - old_value_info = self._db.get((self._cf, key_info)) - if token_uid == self._settings.HATHOR_TOKEN_UID and old_value_info is None: - self._create_genesis_info() - old_value_info = self._db.get((self._cf, key_info)) - assert old_value_info is not None - dict_info = self._from_value_info(old_value_info) - dict_info['total'] -= amount - new_value_info = self._to_value_info(dict_info) - self._db.put((self._cf, key_info), new_value_info) - def _add_utxo(self, tx: BaseTransaction, index: int) -> None: """ Add tx to mint/melt indexes and total amount """ @@ -263,7 +259,7 @@ def _add_utxo(self, tx: BaseTransaction, index: int) -> None: # add to melt index self._add_authority_utxo(token_uid, tx.hash, index, is_mint=False) else: - self._add_to_total(token_uid, tx_output.value) + self.add_to_total(token_uid, tx_output.value) def _remove_utxo(self, tx: BaseTransaction, index: int) -> None: """ Remove tx from mint/melt indexes and total amount @@ -280,7 +276,7 @@ def _remove_utxo(self, tx: BaseTransaction, index: int) -> None: # remove from melt index self._remove_authority_utxo(token_uid, tx.hash, index, is_mint=False) else: - self._subtract_from_total(token_uid, tx_output.value) + self.add_to_total(token_uid, -tx_output.value) def add_tx(self, tx: BaseTransaction) -> None: # if it's a TokenCreationTransaction, update name and symbol @@ -292,7 +288,7 @@ def add_tx(self, tx: BaseTransaction) -> None: key_info = self._to_key_info(tx.hash) token_info = self._db.get((self._cf, key_info)) if token_info is None: - self._create_token_info(tx.hash, tx.token_name, tx.token_symbol) + self.create_token_info(tx.hash, tx.token_name, tx.token_symbol) if tx.is_transaction: # Adding this tx to the transactions key list @@ -308,6 +304,24 @@ def 
add_tx(self, tx: BaseTransaction) -> None: self.log.debug('add utxo', tx=tx.hash_hex, index=index) self._add_utxo(tx, index) + # Handle actions from Nano Contracts. + if tx.is_nano_contract(): + assert isinstance(tx, Transaction) + nano_header = tx.get_nano_header() + ctx = nano_header.get_context() + for action in ctx.__all_actions__: + match action: + case NCDepositAction(): + self.add_to_total(action.token_uid, action.amount) + case NCWithdrawalAction(): + self.add_to_total(action.token_uid, -action.amount) + case NCGrantAuthorityAction() | NCAcquireAuthorityAction(): + # These actions don't affect the nc token balance, + # so no need for any special handling on the index. + pass + case _: + assert_never(action) + def remove_tx(self, tx: BaseTransaction) -> None: for tx_input in tx.inputs: spent_tx = tx.get_spent_tx(tx_input) @@ -324,7 +338,25 @@ def remove_tx(self, tx: BaseTransaction) -> None: # if it's a TokenCreationTransaction, remove it from index if tx.version == TxVersion.TOKEN_CREATION_TRANSACTION: - self._destroy_token(tx.hash) + self.destroy_token(tx.hash) + + # Handle actions from Nano Contracts. + if tx.is_nano_contract(): + assert isinstance(tx, Transaction) + nano_header = tx.get_nano_header() + ctx = nano_header.get_context() + for action in ctx.__all_actions__: + match action: + case NCDepositAction(): + self.add_to_total(action.token_uid, -action.amount) + case NCWithdrawalAction(): + self.add_to_total(action.token_uid, action.amount) + case NCGrantAuthorityAction() | NCAcquireAuthorityAction(): + # These actions don't affect the nc token balance, + # so no need for any special handling on the index. 
+ pass + case _: + assert_never(action) def iter_all_tokens(self) -> Iterator[tuple[bytes, TokenIndexInfo]]: self.log.debug('seek to start') diff --git a/hathor/indexes/tokens_index.py b/hathor/indexes/tokens_index.py index 4c958e869..b582ea021 100644 --- a/hathor/indexes/tokens_index.py +++ b/hathor/indexes/tokens_index.py @@ -114,6 +114,16 @@ def get_token_info(self, token_uid: bytes) -> TokenIndexInfo: """ raise NotImplementedError + @abstractmethod + def create_token_info(self, token_uid: bytes, name: str, symbol: str, total: int = 0) -> None: + """Create a token info for a new token.""" + raise NotImplementedError + + @abstractmethod + def destroy_token(self, token_uid: bytes) -> None: + """Destroy a token.""" + raise NotImplementedError + @abstractmethod def get_transactions_count(self, token_uid: bytes) -> int: """ Get quantity of transactions from requested token @@ -139,3 +149,8 @@ def get_newer_transactions(self, token_uid: bytes, timestamp: int, hash_bytes: b """ Get transactions from the timestamp/hash_bytes reference to the newest """ raise NotImplementedError + + @abstractmethod + def add_to_total(self, token_uid: bytes, amount: int) -> None: + """Add an amount to the total of `token_uid`. 
The amount may be negative.""" + raise NotImplementedError diff --git a/hathor/nanocontracts/balance_rules.py b/hathor/nanocontracts/balance_rules.py index e32e595dc..087c100d4 100644 --- a/hathor/nanocontracts/balance_rules.py +++ b/hathor/nanocontracts/balance_rules.py @@ -101,7 +101,9 @@ class _DepositRules(BalanceRules[NCDepositAction]): @override def verification_rule(self, token_dict: dict[TokenUid, TokenInfo]) -> None: - raise NotImplementedError('temporarily removed during nano merge') + token_info = token_dict.get(self.action.token_uid, TokenInfo.get_default()) + token_info.amount = token_info.amount + self.action.amount + token_dict[self.action.token_uid] = token_info @override def nc_callee_execution_rule(self, callee_changes_tracker: NCChangesTracker) -> None: @@ -123,7 +125,9 @@ class _WithdrawalRules(BalanceRules[NCWithdrawalAction]): @override def verification_rule(self, token_dict: dict[TokenUid, TokenInfo]) -> None: - raise NotImplementedError('temporarily removed during nano merge') + token_info = token_dict.get(self.action.token_uid, TokenInfo.get_default()) + token_info.amount = token_info.amount - self.action.amount + token_dict[self.action.token_uid] = token_info @override def nc_callee_execution_rule(self, callee_changes_tracker: NCChangesTracker) -> None: @@ -145,7 +149,17 @@ class _GrantAuthorityRules(BalanceRules[NCGrantAuthorityAction]): @override def verification_rule(self, token_dict: dict[TokenUid, TokenInfo]) -> None: - raise NotImplementedError('temporarily removed during nano merge') + assert self.action.token_uid != HATHOR_TOKEN_UID + token_info = token_dict.get(self.action.token_uid, TokenInfo.get_default()) + if self.action.mint and not token_info.can_mint: + raise NCInvalidAction( + f'{self.action.name} token {self.action.token_uid.hex()} requires mint, but no input has it' + ) + + if self.action.melt and not token_info.can_melt: + raise NCInvalidAction( + f'{self.action.name} token {self.action.token_uid.hex()} requires melt, 
but no input has it' + ) @override def nc_callee_execution_rule(self, callee_changes_tracker: NCChangesTracker) -> None: @@ -187,7 +201,11 @@ class _AcquireAuthorityRules(BalanceRules[NCAcquireAuthorityAction]): @override def verification_rule(self, token_dict: dict[TokenUid, TokenInfo]) -> None: - raise NotImplementedError('temporarily removed during nano merge') + assert self.action.token_uid != HATHOR_TOKEN_UID + token_info = token_dict.get(self.action.token_uid, TokenInfo.get_default()) + token_info.can_mint = token_info.can_mint or self.action.mint + token_info.can_melt = token_info.can_melt or self.action.melt + token_dict[self.action.token_uid] = token_info @override def nc_callee_execution_rule(self, callee_changes_tracker: NCChangesTracker) -> None: diff --git a/hathor/nanocontracts/on_chain_blueprint.py b/hathor/nanocontracts/on_chain_blueprint.py index de0484e20..f0eb32cb1 100644 --- a/hathor/nanocontracts/on_chain_blueprint.py +++ b/hathor/nanocontracts/on_chain_blueprint.py @@ -333,7 +333,12 @@ def get_funds_struct(self) -> bytes: @override def get_sighash_all(self, *, skip_cache: bool = False) -> bytes: - raise NotImplementedError('temporarily removed during nano merge') + if not skip_cache and self._sighash_cache: + return self._sighash_cache + struct_bytes = super().get_sighash_all(skip_cache=True) + struct_bytes += self._serialize_ocb(skip_signature=True) + self._sighash_cache = struct_bytes + return struct_bytes @override def get_funds_fields_from_struct(self, buf: bytes, *, verbose: VerboseCallback = None) -> bytes: diff --git a/hathor/nanocontracts/storage/factory.py b/hathor/nanocontracts/storage/factory.py index db746f55c..e4430b8f6 100644 --- a/hathor/nanocontracts/storage/factory.py +++ b/hathor/nanocontracts/storage/factory.py @@ -43,7 +43,13 @@ def _get_trie(self, root_id: Optional[bytes]) -> 'PatriciaTrie': return trie def get_block_storage_from_block(self, block: Block) -> NCBlockStorage: - raise NotImplementedError('temporarily removed 
during nano merge') + """Return a block storage. If the block is genesis, it will return an empty block storage.""" + meta = block.get_metadata() + if block.is_genesis: + assert meta.nc_block_root_id is None + return self.get_empty_block_storage() + assert meta.nc_block_root_id is not None + return self.get_block_storage(meta.nc_block_root_id) def get_block_storage(self, block_root_id: bytes) -> NCBlockStorage: """Return a non-empty block storage.""" diff --git a/hathor/transaction/headers/nano_header.py b/hathor/transaction/headers/nano_header.py index cbde15da2..709df3031 100644 --- a/hathor/transaction/headers/nano_header.py +++ b/hathor/transaction/headers/nano_header.py @@ -255,7 +255,51 @@ def get_contract_id(self) -> ContractId: def get_blueprint_id(self, block: Block | None = None) -> BlueprintId: """Return the blueprint id.""" - raise NotImplementedError('temporarily removed during nano merge') + from hathor.nanocontracts.exception import NanoContractDoesNotExist + from hathor.nanocontracts.types import BlueprintId, ContractId, VertexId as NCVertexId + from hathor.transaction import Transaction + from hathor.transaction.storage.exceptions import TransactionDoesNotExist + assert self.tx.storage is not None + + if self.is_creating_a_new_contract(): + blueprint_id = BlueprintId(NCVertexId(self.nc_id)) + return blueprint_id + + if block is None: + block = self.tx.storage.get_best_block() + + try: + nc_storage = self.tx.storage.get_nc_storage(block, ContractId(NCVertexId(self.nc_id))) + blueprint_id = nc_storage.get_blueprint_id() + return blueprint_id + except NanoContractDoesNotExist: + # If the NC storage doesn't exist, the contract must be created by a tx in the mempool + pass + + try: + nc_creation = self.tx.storage.get_transaction(self.nc_id) + except TransactionDoesNotExist as e: + raise NanoContractDoesNotExist from e + + if not nc_creation.is_nano_contract(): + raise NanoContractDoesNotExist(f'not a nano contract tx: {self.nc_id.hex()}') + + assert 
isinstance(nc_creation, Transaction) + nano_header = nc_creation.get_nano_header() + + if not nano_header.is_creating_a_new_contract(): + raise NanoContractDoesNotExist(f'not a contract creation tx: {self.nc_id.hex()}') + + # must be in the mempool + nc_creation_meta = nc_creation.get_metadata() + if nc_creation_meta.first_block is not None: + # otherwise, it failed or skipped execution + from hathor.transaction.nc_execution_state import NCExecutionState + assert nc_creation_meta.nc_execution in (NCExecutionState.FAILURE, NCExecutionState.SKIPPED) + raise NanoContractDoesNotExist + + blueprint_id = BlueprintId(NCVertexId(nc_creation.get_nano_header().nc_id)) + return blueprint_id def get_actions(self) -> list[NCAction]: """Get a list of NCActions from the header actions.""" diff --git a/hathor/transaction/token_creation_tx.py b/hathor/transaction/token_creation_tx.py index 2aaf6fb31..b603d3053 100644 --- a/hathor/transaction/token_creation_tx.py +++ b/hathor/transaction/token_creation_tx.py @@ -149,13 +149,13 @@ def get_funds_struct(self) -> bytes: return struct_bytes - def get_sighash_all(self) -> bytes: + def get_sighash_all(self, *, skip_cache: bool = False) -> bytes: """ Returns a serialization of the inputs and outputs without including any other field :return: Serialization of the inputs, outputs and tokens :rtype: bytes """ - if self._sighash_cache: + if not skip_cache and self._sighash_cache: return self._sighash_cache struct_bytes = pack( @@ -247,6 +247,10 @@ def _get_token_info_from_inputs(self) -> dict[TokenUid, TokenInfo]: token_dict = super()._get_token_info_from_inputs() # we add the created token's info to token_dict, as the creation tx allows for mint/melt - token_dict[self.hash] = TokenInfo(0, True, True) + token_dict[self.hash] = TokenInfo( + amount=0, + can_mint=True, + can_melt=True, + ) return token_dict diff --git a/hathor/transaction/transaction.py b/hathor/transaction/transaction.py index c5b1d7ecf..27296ef1f 100644 --- 
a/hathor/transaction/transaction.py +++ b/hathor/transaction/transaction.py @@ -15,6 +15,7 @@ from __future__ import annotations import hashlib +from dataclasses import dataclass from struct import pack from typing import TYPE_CHECKING, Any, NamedTuple, Optional @@ -42,11 +43,21 @@ _SIGHASH_ALL_FORMAT_STRING = '!BBBBB' -class TokenInfo(NamedTuple): +@dataclass(slots=True, kw_only=True) +class TokenInfo: amount: int can_mint: bool can_melt: bool + @staticmethod + def get_default() -> TokenInfo: + """Create a default, empty token info.""" + return TokenInfo( + amount=0, + can_mint=False, + can_melt=False, + ) + class RewardLockedInfo(NamedTuple): block_hash: VertexId @@ -95,6 +106,11 @@ def __init__( self._sighash_cache: Optional[bytes] = None self._sighash_data_cache: Optional[bytes] = None + def clear_sighash_cache(self) -> None: + """Clear caches related to sighash calculation.""" + self._sighash_cache = None + self._sighash_data_cache = None + @property def is_block(self) -> bool: """Returns true if this is a block""" @@ -205,7 +221,7 @@ def get_funds_struct(self) -> bytes: return struct_bytes - def get_sighash_all(self) -> bytes: + def get_sighash_all(self, *, skip_cache: bool = False) -> bytes: """Return a serialization of the inputs, outputs and tokens without including any other field :return: Serialization of the inputs, outputs and tokens @@ -214,7 +230,7 @@ def get_sighash_all(self) -> bytes: # This method does not depend on the input itself, however we call it for each one to sign it. # For transactions that have many inputs there is a significant decrease on the verify time # when using this cache, so we call this method only once. - if self._sighash_cache: + if not skip_cache and self._sighash_cache: return self._sighash_cache struct_bytes = bytearray( @@ -313,6 +329,8 @@ def get_complete_token_info(self) -> dict[TokenUid, TokenInfo]: Get a complete token info dict, including data from both inputs and outputs. 
""" token_dict = self._get_token_info_from_inputs() + self._update_token_info_from_nano_actions(token_dict=token_dict) + # This one must be called last so token_dict already contains all tokens in inputs and nano actions. self._update_token_info_from_outputs(token_dict=token_dict) return token_dict @@ -324,30 +342,47 @@ def get_minimum_number_of_inputs(self) -> int: return 0 return 1 + def _update_token_info_from_nano_actions(self, *, token_dict: dict[TokenUid, TokenInfo]) -> None: + """Update token_dict with nano actions.""" + if not self.is_nano_contract(): + return + + from hathor.nanocontracts.balance_rules import BalanceRules + nano_header = self.get_nano_header() + + for action in nano_header.get_actions(): + rules = BalanceRules.get_rules(self._settings, action) + rules.verification_rule(token_dict) + def _get_token_info_from_inputs(self) -> dict[TokenUid, TokenInfo]: """Sum up all tokens present in the inputs and their properties (amount, can_mint, can_melt) """ token_dict: dict[TokenUid, TokenInfo] = {} - default_info: TokenInfo = TokenInfo(0, False, False) - # add HTR to token dict due to tx melting tokens: there might be an HTR output without any # input or authority. 
If we don't add it, an error will be raised when iterating through # the outputs of such tx (error: 'no token creation and no inputs for token 00') - token_dict[self._settings.HATHOR_TOKEN_UID] = TokenInfo(0, False, False) + token_dict[self._settings.HATHOR_TOKEN_UID] = TokenInfo.get_default() for tx_input in self.inputs: spent_tx = self.get_spent_tx(tx_input) spent_output = spent_tx.outputs[tx_input.index] token_uid = spent_tx.get_token_uid(spent_output.get_token_index()) - (amount, can_mint, can_melt) = token_dict.get(token_uid, default_info) + token_info = token_dict.get(token_uid, TokenInfo.get_default()) + amount = token_info.amount + can_mint = token_info.can_mint + can_melt = token_info.can_melt if spent_output.is_token_authority(): can_mint = can_mint or spent_output.can_mint_token() can_melt = can_melt or spent_output.can_melt_token() else: amount -= spent_output.value - token_dict[token_uid] = TokenInfo(amount, can_mint, can_melt) + token_dict[token_uid] = TokenInfo( + amount=amount, + can_mint=can_mint, + can_melt=can_melt, + ) return token_dict @@ -364,23 +399,20 @@ def _update_token_info_from_outputs(self, *, token_dict: dict[TokenUid, TokenInf token_info = token_dict.get(token_uid) if token_info is None: raise InvalidToken('no inputs for token {}'.format(token_uid.hex())) + + # for authority outputs, make sure the same capability (mint/melt) was present in the inputs + if tx_output.can_mint_token() and not token_info.can_mint: + raise InvalidToken(f'output at index {index} has mint authority, but no input has it') + if tx_output.can_melt_token() and not token_info.can_melt: + raise InvalidToken(f'output at index {index} has melt authority, but no input has it') + + if tx_output.is_token_authority(): + # make sure we only have authorities that we know of + if tx_output.value > TxOutput.ALL_AUTHORITIES: + raise InvalidToken('Invalid authorities in output (0b{0:b})'.format(tx_output.value)) else: - # for authority outputs, make sure the same capability 
(mint/melt) was present in the inputs - if tx_output.can_mint_token() and not token_info.can_mint: - raise InvalidToken('output has mint authority, but no input has it: {}'.format( - tx_output.to_human_readable())) - if tx_output.can_melt_token() and not token_info.can_melt: - raise InvalidToken('output has melt authority, but no input has it: {}'.format( - tx_output.to_human_readable())) - - if tx_output.is_token_authority(): - # make sure we only have authorities that we know of - if tx_output.value > TxOutput.ALL_AUTHORITIES: - raise InvalidToken('Invalid authorities in output (0b{0:b})'.format(tx_output.value)) - else: - # for regular outputs, just subtract from the total amount - sum_tokens = token_info.amount + tx_output.value - token_dict[token_uid] = TokenInfo(sum_tokens, token_info.can_mint, token_info.can_melt) + # for regular outputs, just subtract from the total amount + token_dict[token_uid].amount = token_info.amount + tx_output.value def is_double_spending(self) -> bool: """ Iterate through inputs to check if they were already spent diff --git a/hathor/transaction/transaction_metadata.py b/hathor/transaction/transaction_metadata.py index fa9d2c977..bdbec9c84 100644 --- a/hathor/transaction/transaction_metadata.py +++ b/hathor/transaction/transaction_metadata.py @@ -20,6 +20,8 @@ from hathor.conf.get_settings import get_global_settings from hathor.feature_activation.feature import Feature from hathor.feature_activation.model.feature_state import FeatureState +from hathor.transaction.nc_execution_state import NCExecutionState +from hathor.transaction.types import MetaNCCallRecord from hathor.transaction.validation_state import ValidationState from hathor.util import json_dumpb, json_loadb, practically_equal from hathor.utils.weight import work_to_weight @@ -46,6 +48,11 @@ class TransactionMetadata: first_block: Optional[bytes] validation: ValidationState + # Used to store the root node id of the contract tree related to this block. 
+ nc_block_root_id: Optional[bytes] + nc_execution: Optional[NCExecutionState] + nc_calls: Optional[list[MetaNCCallRecord]] + # A dict of features in the feature activation process and their respective state. Must only be used by Blocks, # is None otherwise. This is only used for caching, so it can be safely cleared up, as it would be recalculated # when necessary. @@ -63,6 +70,7 @@ def __init__( hash: Optional[bytes] = None, accumulated_weight: int = 0, score: int = 0, + nc_block_root_id: Optional[bytes] = None, settings: HathorSettings | None = None, ) -> None: from hathor.transaction.genesis import is_genesis @@ -71,6 +79,11 @@ def __init__( self.hash = hash self._tx_ref = None + # Nano contract metadata + self.nc_block_root_id = nc_block_root_id + self.nc_execution = None + self.nc_calls = None + # Tx outputs that have been spent. # The key is the output index, while the value is a set of the transactions which spend the output. self.spent_outputs = spent_outputs or defaultdict(list) @@ -176,7 +189,7 @@ def __eq__(self, other: Any) -> bool: return False for field in ['hash', 'conflict_with', 'voided_by', 'received_by', 'children', 'accumulated_weight', 'twins', 'score', 'first_block', 'validation', - 'feature_states']: + 'feature_states', 'nc_block_root_id', 'nc_calls', 'nc_execution']: if (getattr(self, field) or None) != (getattr(other, field) or None): return False @@ -231,6 +244,9 @@ def to_storage_json(self) -> dict[str, Any]: else: data['first_block'] = None data['validation'] = self.validation.name.lower() + data['nc_block_root_id'] = self.nc_block_root_id.hex() if self.nc_block_root_id else None + data['nc_calls'] = [x.to_json() for x in self.nc_calls] if self.nc_calls else None + data['nc_execution'] = self.nc_execution.value if self.nc_execution else None return data def to_json(self) -> dict[str, Any]: @@ -292,6 +308,24 @@ def create_from_json(cls, data: dict[str, Any]) -> 'TransactionMetadata': _val_name = data.get('validation', None) 
meta.validation = ValidationState.from_name(_val_name) if _val_name is not None else ValidationState.INITIAL + nc_block_root_id_raw = data.get('nc_block_root_id') + if nc_block_root_id_raw is not None: + meta.nc_block_root_id = bytes.fromhex(nc_block_root_id_raw) + else: + meta.nc_block_root_id = None + + nc_execution_raw = data.get('nc_execution') + if nc_execution_raw is not None: + meta.nc_execution = NCExecutionState(nc_execution_raw) + else: + meta.nc_execution = None + + nc_calls_raw = data.get('nc_calls') + if nc_calls_raw is not None: + meta.nc_calls = [MetaNCCallRecord.from_json(x) for x in nc_calls_raw] + else: + meta.nc_calls = None + + return meta @classmethod diff --git a/hathor/verification/verification_service.py b/hathor/verification/verification_service.py index 7930efcf0..3e4f3ada9 100644 --- a/hathor/verification/verification_service.py +++ b/hathor/verification/verification_service.py @@ -245,8 +245,8 @@ def _verify_tx( self.verify_without_storage(tx) self.verifiers.tx.verify_sigops_input(tx) self.verifiers.tx.verify_inputs(tx) # need to run verify_inputs first to check if all inputs exist - self.verifiers.vertex.verify_parents(tx) self.verifiers.tx.verify_sum(token_dict or tx.get_complete_token_info()) + self.verifiers.vertex.verify_parents(tx) if reject_locked_reward: self.verifiers.tx.verify_reward_locked(tx) diff --git a/tests/resources/transaction/test_mining.py b/tests/resources/transaction/test_mining.py index 94b0d45e7..0550751eb 100644 --- a/tests/resources/transaction/test_mining.py +++ b/tests/resources/transaction/test_mining.py @@ -40,6 +40,9 @@ def test_get_block_template_with_address(self): 'feature_activation_bit_counts': [0, 0, 0, 0], 'accumulated_weight_raw': '2', 'score_raw': '0', + 'nc_block_root_id': None, + 'nc_execution': None, + 'nc_calls': None, }, 'tokens': [], 'data': '', @@ -75,6 +78,9 @@ def test_get_block_template_without_address(self): 'feature_activation_bit_counts': [0, 0, 0, 0], 'accumulated_weight_raw': '2', 
'score_raw': '0', + 'nc_block_root_id': None, + 'nc_execution': None, + 'nc_calls': None, }, 'tokens': [], 'data': '', diff --git a/tests/resources/transaction/test_tx.py b/tests/resources/transaction/test_tx.py index 884acc30d..3a005dbfc 100644 --- a/tests/resources/transaction/test_tx.py +++ b/tests/resources/transaction/test_tx.py @@ -128,10 +128,10 @@ def test_get_one_known_tx(self): self.manager.tx_storage.save_transaction(tx_input) token_bytes1 = bytes.fromhex('001c382847d8440d05da95420bee2ebeb32bc437f82a9ae47b0745c8a29a7b0d') - self.manager.tx_storage.indexes.tokens._create_token_info(token_bytes1, 'Test Coin', 'TSC') + self.manager.tx_storage.indexes.tokens.create_token_info(token_bytes1, 'Test Coin', 'TSC') token_bytes2 = bytes.fromhex('007231eee3cb6160d95172a409d634d0866eafc8775f5729fff6a61e7850aba5') - self.manager.tx_storage.indexes.tokens._create_token_info(token_bytes2, 'NewCoin', 'NCN') + self.manager.tx_storage.indexes.tokens.create_token_info(token_bytes2, 'NewCoin', 'NCN') response = yield self.web.get( "transaction", {b'id': b'0033784bc8443ba851fd88d81c6f06774ae529f25c1fa8f026884ad0a0e98011'}) @@ -223,7 +223,7 @@ def test_get_one_known_tx_with_authority(self): # Both inputs are the same as the last parent, so no need to manually add them token_bytes1 = bytes.fromhex('000023b318c91dcfd4b967b205dc938f9f5e2fd5114256caacfb8f6dd13db330') - self.manager.tx_storage.indexes.tokens._create_token_info(token_bytes1, 'Wat wat', 'WAT') + self.manager.tx_storage.indexes.tokens.create_token_info(token_bytes1, 'Wat wat', 'WAT') response = yield self.web.get( "transaction", {b'id': b'00005f234469407614bf0abedec8f722bb5e534949ad37650f6077c899741ed7'}) diff --git a/tests/unittest.py b/tests/unittest.py index 93f3bbfd1..87e537e87 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -1,3 +1,4 @@ +import base64 import os import re import secrets @@ -63,6 +64,16 @@ def _get_default_peer_id_pool_filepath() -> str: PEER_ID_POOL = list(_load_peer_pool()) 
+OCB_TEST_PRIVKEY: bytes = base64.b64decode( + 'MIH0MF8GCSqGSIb3DQEFDTBSMDEGCSqGSIb3DQEFDDAkBBCIdovnmKjK3KU' + 'c61YGgja0AgIIADAMBggqhkiG9w0CCQUAMB0GCWCGSAFlAwQBKgQQl2CJT4' + 'I2IUzRNoU9hyOWEwSBkLznN9Nunel+kK0FXpk//z0ZAnIyVacfHklCxFGyO' + 'j1VSjor0CHzH2Gmblvr+m7lCmRmqSVAwJpplqQYdBUF6sR9djHLY6svPY0o' + '//dqQ/xM7QiY2FHlb3JQCTu7DaMflqPcJXlRXAFyoACnmj4/lUJWgrcWala' + 'rCSI+8rIillg3AU8/2gfoB1BxulVIIG35SQ==' +) +OCB_TEST_PASSWORD: bytes = b'OCBtestPW' + class TestBuilder(Builder): __test__ = False diff --git a/tests/utils.py b/tests/utils.py index 48a420856..70de03753 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -5,7 +5,7 @@ import time import urllib.parse from dataclasses import dataclass -from typing import Any, Optional +from typing import Any, Optional, cast import requests from cryptography.hazmat.backends import default_backend @@ -50,19 +50,62 @@ def resolve_block_bytes(*, block_bytes: bytes, cpu_mining_service: CpuMiningServ return block.get_struct() -def add_custom_tx(manager: HathorManager, tx_inputs: list[tuple[BaseTransaction, int]], *, n_outputs: int = 1, - base_parent: Optional[Transaction] = None, weight: Optional[float] = None, - resolve: bool = False, address: Optional[str] = None, inc_timestamp: int = 0) -> Transaction: +def add_custom_tx( + manager: HathorManager, + tx_inputs: list[tuple[BaseTransaction, int]], + *, + n_outputs: int = 1, + base_parent: Optional[Transaction] = None, + weight: Optional[float] = None, + resolve: bool = False, + address: Optional[str] = None, + inc_timestamp: int = 0 +) -> Transaction: """Add a custom tx based on the gen_custom_tx(...) 
method.""" - tx = gen_custom_tx(manager, tx_inputs, n_outputs=n_outputs, base_parent=base_parent, weight=weight, - resolve=resolve, address=address, inc_timestamp=inc_timestamp) + tx = gen_custom_tx(manager, + tx_inputs, + n_outputs=n_outputs, + base_parent=base_parent, + weight=weight, + resolve=resolve, + address=address, + inc_timestamp=inc_timestamp) manager.propagate_tx(tx) return tx -def gen_custom_tx(manager: HathorManager, tx_inputs: list[tuple[BaseTransaction, int]], *, n_outputs: int = 1, - base_parent: Optional[Transaction] = None, weight: Optional[float] = None, - resolve: bool = False, address: Optional[str] = None, inc_timestamp: int = 0) -> Transaction: +def gen_custom_tx(manager: HathorManager, + tx_inputs: list[tuple[BaseTransaction, int]], + *, + n_outputs: int = 1, + base_parent: Optional[Transaction] = None, + weight: Optional[float] = None, + resolve: bool = False, + address: Optional[str] = None, + inc_timestamp: int = 0) -> Transaction: + """Generate a custom tx based on the inputs and outputs. It gives full control to the + inputs and can be used to generate conflicts and specific patterns in the DAG.""" + tx = gen_custom_base_tx(manager, + tx_inputs, + n_outputs=n_outputs, + base_parent=base_parent, + weight=weight, + resolve=resolve, + address=address, + inc_timestamp=inc_timestamp) + return cast(Transaction, tx) + + +def gen_custom_base_tx(manager: HathorManager, + tx_inputs: list[tuple[BaseTransaction, int]], + *, + n_outputs: int = 1, + base_parent: Optional[Transaction] = None, + weight: Optional[float] = None, + resolve: bool = False, + address: Optional[str] = None, + inc_timestamp: int = 0, + cls: type[BaseTransaction] = Transaction) -> BaseTransaction: """Generate a custom tx based on the inputs and outputs. 
It gives full control to the inputs and can be used to generate conflicts and specific patterns in the DAG.""" wallet = manager.wallet @@ -100,7 +143,7 @@ def gen_custom_tx(manager: HathorManager, tx_inputs: list[tuple[BaseTransaction, else: raise NotImplementedError - tx2 = wallet.prepare_transaction(Transaction, inputs, outputs) + tx2 = wallet.prepare_transaction(cls, inputs, outputs) tx2.storage = manager.tx_storage tx2.timestamp = max(tx_base.timestamp + 1, int(manager.reactor.seconds()))