diff --git a/tests/nanocontracts/blueprints/test_bet.py b/tests/nanocontracts/blueprints/test_bet.py new file mode 100644 index 000000000..1e22aeec9 --- /dev/null +++ b/tests/nanocontracts/blueprints/test_bet.py @@ -0,0 +1,220 @@ +import os +import re +from typing import NamedTuple, Optional + +from hathor.conf import HathorSettings +from hathor.crypto.util import decode_address +from hathor.nanocontracts.context import Context +from hathor.nanocontracts.nc_types import NCType, make_nc_type_for_type +from hathor.nanocontracts.types import ( + Address, + Amount, + ContractId, + NCDepositAction, + NCWithdrawalAction, + SignedData, + Timestamp, + TokenUid, + TxOutputScript, + VertexId, +) +from hathor.transaction import BaseTransaction +from hathor.transaction.scripts import P2PKH +from hathor.util import not_none +from hathor.wallet import KeyPair +from tests.nanocontracts.blueprints.unittest import BlueprintTestCase +from tests.nanocontracts.test_blueprints.bet import Bet + +settings = HathorSettings() + +TX_OUTPUT_SCRIPT_NC_TYPE = make_nc_type_for_type(TxOutputScript) +RESULT_NC_TYPE: NCType[str | None] = make_nc_type_for_type(str | None) # type: ignore[arg-type] +TIMESTAMP_NC_TYPE = make_nc_type_for_type(Timestamp) +TOKEN_UID_NC_TYPE = make_nc_type_for_type(TokenUid) + + +class BetInfo(NamedTuple): + key: KeyPair + address: Address + amount: Amount + score: str + + +class NCBetBlueprintTestCase(BlueprintTestCase): + def setUp(self): + super().setUp() + self.blueprint_id = self.gen_random_blueprint_id() + self.register_blueprint_class(self.blueprint_id, Bet) + self.token_uid = TokenUid(settings.HATHOR_TOKEN_UID) + self.nc_id = ContractId(VertexId(b'1' * 32)) + self.initialize_contract() + self.nc_storage = self.runner.get_storage(self.nc_id) + + def _get_any_tx(self) -> BaseTransaction: + genesis = self.manager.tx_storage.get_all_genesis() + tx = [t for t in genesis if t.is_transaction][0] + return tx + + def _get_any_address(self) -> tuple[Address, KeyPair]: + 
password = os.urandom(12) + key = KeyPair.create(password) + address_b58 = key.address + address_bytes = Address(decode_address(not_none(address_b58))) + return address_bytes, key + + def get_current_timestamp(self) -> int: + return int(self.clock.seconds()) + + def _make_a_bet(self, amount: int, score: str, *, timestamp: Optional[int] = None) -> BetInfo: + (address_bytes, key) = self._get_any_address() + tx = self._get_any_tx() + action = NCDepositAction(token_uid=self.token_uid, amount=amount) + if timestamp is None: + timestamp = self.get_current_timestamp() + context = Context([action], tx, address_bytes, timestamp=timestamp) + self.runner.call_public_method(self.nc_id, 'bet', context, address_bytes, score) + return BetInfo(key=key, address=Address(address_bytes), amount=Amount(amount), score=score) + + def _set_result(self, result: str, oracle_key: Optional[KeyPair] = None) -> None: + signed_result = SignedData[str](result, b'') + + if oracle_key is None: + oracle_key = self.oracle_key + + result_bytes = signed_result.get_data_bytes(self.nc_id) + signed_result.script_input = oracle_key.p2pkh_create_input_data(b'123', result_bytes) + + tx = self._get_any_tx() + context = Context([], tx, Address(b''), timestamp=self.get_current_timestamp()) + self.runner.call_public_method(self.nc_id, 'set_result', context, signed_result) + final_result = self.nc_storage.get_obj(b'final_result', RESULT_NC_TYPE) + self.assertEqual(final_result, '2x2') + + def _withdraw(self, address: Address, amount: int) -> None: + tx = self._get_any_tx() + action = NCWithdrawalAction(token_uid=self.token_uid, amount=amount) + context = Context([action], tx, address, timestamp=self.get_current_timestamp()) + self.runner.call_public_method(self.nc_id, 'withdraw', context) + + def initialize_contract(self) -> None: + self.oracle_key = KeyPair.create(b'123') + assert self.oracle_key.address is not None + self.oracle_script = P2PKH(self.oracle_key.address).get_script() + self.date_last_bet = 
self.get_current_timestamp() + 3600 * 24 + self.runner.create_contract( + self.nc_id, + self.blueprint_id, + Context([], self._get_any_tx(), Address(b''), timestamp=self.get_current_timestamp()), + self.oracle_script, + self.token_uid, + self.date_last_bet, + ) + + def test_blueprint_initialization(self) -> None: + # if initialization was correct we should be able to observe these in the nc_storage: + self.assertEqual(self.nc_storage.get_obj(b'oracle_script', TX_OUTPUT_SCRIPT_NC_TYPE), self.oracle_script) + self.assertEqual(self.nc_storage.get_obj(b'token_uid', TOKEN_UID_NC_TYPE), self.token_uid) + self.assertEqual(self.nc_storage.get_obj(b'date_last_bet', TIMESTAMP_NC_TYPE), self.date_last_bet) + + def test_basic_flow(self) -> None: + runner = self.runner + + tx = self._get_any_tx() + + ### + # Make some bets. + ### + self._make_a_bet(100, '1x1') + self._make_a_bet(200, '1x1') + self._make_a_bet(300, '1x1') + bet1 = self._make_a_bet(500, '2x2') + + ### + # Set the final result. + ### + self._set_result('2x2') + + ### + # Single winner withdraws all funds. + ### + self.assertEqual(1100, runner.call_view_method(self.nc_id, 'get_max_withdrawal', bet1.address)) + + self._withdraw(bet1.address, 100) + self.assertEqual(1000, runner.call_view_method(self.nc_id, 'get_max_withdrawal', bet1.address)) + + self._withdraw(bet1.address, 1000) + self.assertEqual(0, runner.call_view_method(self.nc_id, 'get_max_withdrawal', bet1.address)) + + # Out of funds! Any withdrawal must fail from now on... 
+ amount = 1 + action = NCWithdrawalAction(token_uid=self.token_uid, amount=amount) + context = Context([action], tx, bet1.address, timestamp=self.get_current_timestamp()) + with self.assertNCFail('InsufficientBalance', 'withdrawal amount is greater than available (max: 0)'): + runner.call_public_method(self.nc_id, 'withdraw', context) + + def test_make_a_bet_with_withdrawal(self) -> None: + self._make_a_bet(100, '1x1') + + (address_bytes, _) = self._get_any_address() + tx = self._get_any_tx() + action = NCWithdrawalAction(token_uid=self.token_uid, amount=1) + context = Context([action], tx, address_bytes, timestamp=self.get_current_timestamp()) + score = '1x1' + with self.assertNCFail('NCForbiddenAction', 'action WITHDRAWAL is forbidden on method `bet`'): + self.runner.call_public_method(self.nc_id, 'bet', context, address_bytes, score) + + def test_make_a_bet_after_result(self) -> None: + self._make_a_bet(100, '1x1') + self._set_result('2x2') + with self.assertNCFail('ResultAlreadySet', ''): + self._make_a_bet(100, '1x1') + + def test_make_a_bet_after_date_last_bet(self) -> None: + with self.assertNCFail('TooLate', re.compile(r'cannot place bets after \d+')): + self._make_a_bet(100, '1x1', timestamp=self.date_last_bet + 1) + + def test_set_results_two_times(self) -> None: + self._set_result('2x2') + with self.assertNCFail('ResultAlreadySet', ''): + self._set_result('5x1') + + def test_set_results_wrong_signature(self) -> None: + wrong_oracle_key = KeyPair.create(b'123') + with self.assertNCFail('InvalidOracleSignature', ''): + self._set_result('3x2', oracle_key=wrong_oracle_key) + + def test_withdraw_before_result(self) -> None: + bet1 = self._make_a_bet(100, '1x1') + with self.assertNCFail('ResultNotAvailable', ''): + self._withdraw(bet1.address, 100) + + def test_withdraw_with_deposits(self) -> None: + (address_bytes, _) = self._get_any_address() + tx = self._get_any_tx() + action = NCDepositAction(token_uid=self.token_uid, amount=1) + context = 
Context([action], tx, address_bytes, timestamp=self.get_current_timestamp()) + with self.assertNCFail('NCForbiddenAction', 'action DEPOSIT is forbidden on method `withdraw`'): + self.runner.call_public_method(self.nc_id, 'withdraw', context) + + def test_make_a_bet_wrong_token(self) -> None: + + (address_bytes, _) = self._get_any_address() + tx = self._get_any_tx() + token_uid = TokenUid(b'xxx') + self.assertNotEqual(token_uid, self.token_uid) + action = NCDepositAction(token_uid=token_uid, amount=1) + context = Context([action], tx, address_bytes, timestamp=self.get_current_timestamp()) + score = '1x1' + with self.assertNCFail('InvalidToken', 'token different from 00'): + self.runner.call_public_method(self.nc_id, 'bet', context, address_bytes, score) + + def test_withdraw_wrong_token(self) -> None: + bet1 = self._make_a_bet(100, '1x1') + + tx = self._get_any_tx() + token_uid = TokenUid(b'xxx') + self.assertNotEqual(token_uid, self.token_uid) + action = NCWithdrawalAction(token_uid=token_uid, amount=1) + context = Context([action], tx, bet1.address, timestamp=self.get_current_timestamp()) + with self.assertNCFail('InvalidToken', 'token different from 00'): + self.runner.call_public_method(self.nc_id, 'withdraw', context) diff --git a/tests/nanocontracts/blueprints/test_swap_demo.py b/tests/nanocontracts/blueprints/test_swap_demo.py new file mode 100644 index 000000000..79e084e3e --- /dev/null +++ b/tests/nanocontracts/blueprints/test_swap_demo.py @@ -0,0 +1,116 @@ +from hathor.nanocontracts.context import Context +from hathor.nanocontracts.nc_types import make_nc_type_for_type +from hathor.nanocontracts.storage.contract_storage import Balance +from hathor.nanocontracts.types import NCDepositAction, NCWithdrawalAction, TokenUid +from tests.nanocontracts.blueprints.unittest import BlueprintTestCase +from tests.nanocontracts.test_blueprints.swap_demo import InvalidActions, InvalidRatio, InvalidTokens, SwapDemo + +SWAP_NC_TYPE = make_nc_type_for_type(int) + + +class 
SwapDemoTestCase(BlueprintTestCase): + def setUp(self): + super().setUp() + + self.blueprint_id = self.gen_random_blueprint_id() + self.contract_id = self.gen_random_contract_id() + + self.nc_catalog.blueprints[self.blueprint_id] = SwapDemo + + # Test doubles: + self.token_a = self.gen_random_token_uid() + self.token_b = self.gen_random_token_uid() + self.token_c = self.gen_random_token_uid() + self.address = self.gen_random_address() + self.tx = self.get_genesis_tx() + + def _initialize( + self, + init_token_a: tuple[TokenUid, int, int], + init_token_b: tuple[TokenUid, int, int] + ) -> None: + # Arrange: + token_a, multiplier_a, amount_a = init_token_a + token_b, multiplier_b, amount_b = init_token_b + deposit_a = NCDepositAction(token_uid=token_a, amount=amount_a) + deposit_b = NCDepositAction(token_uid=token_b, amount=amount_b) + context = Context( + actions=[deposit_a, deposit_b], + vertex=self.tx, + address=self.address, + timestamp=self.now + ) + + # Act: + self.runner.create_contract( + self.contract_id, + self.blueprint_id, + context, + token_a, + token_b, + multiplier_a, + multiplier_b, + ) + self.nc_storage = self.runner.get_storage(self.contract_id) + + def _swap( + self, + amount_a: tuple[int, TokenUid], + amount_b: tuple[int, TokenUid] + ) -> None: + # Arrange: + value_a, token_a = amount_a + value_b, token_b = amount_b + action_a_type = self.get_action_type(value_a) + action_b_type = self.get_action_type(value_b) + swap_a = action_a_type(token_uid=token_a, amount=abs(value_a)) + swap_b = action_b_type(token_uid=token_b, amount=abs(value_b)) + context = Context( + actions=[swap_a, swap_b], + vertex=self.tx, + address=self.address, + timestamp=self.now + ) + + # Act: + self.runner.call_public_method(self.contract_id, 'swap', context) + + def test_lifecycle(self) -> None: + # Create a contract. 
+ # Arrange and act within: + self._initialize((self.token_a, 1, 100_00), (self.token_b, 1, 100_00)) + + # Assert: + self.assertEqual( + Balance(value=100_00, can_mint=False, can_melt=False), self.nc_storage.get_balance(self.token_a) + ) + self.assertEqual( + Balance(value=100_00, can_mint=False, can_melt=False), self.nc_storage.get_balance(self.token_b) + ) + self.assertEqual(0, self.nc_storage.get_obj(b'swaps_counter', SWAP_NC_TYPE)) + + # Make a valid swap. + # Arrange and act within: + self._swap((20_00, self.token_a), (-20_00, self.token_b)) + # Assert: + self.assertEqual( + Balance(value=120_00, can_mint=False, can_melt=False), self.nc_storage.get_balance(self.token_a) + ) + self.assertEqual( + Balance(value=80_00, can_mint=False, can_melt=False), self.nc_storage.get_balance(self.token_b) + ) + self.assertEqual(1, self.nc_storage.get_obj(b'swaps_counter', SWAP_NC_TYPE)) + + # Make multiple invalid swaps raising all possible exceptions. + with self.assertRaises(InvalidTokens): + self._swap((-20_00, self.token_a), (20_00, self.token_c)) + with self.assertRaises(InvalidActions): + self._swap((20_00, self.token_a), (40_00, self.token_b)) + with self.assertRaises(InvalidRatio): + self._swap((20_00, self.token_a), (-40_00, self.token_b)) + + def get_action_type(self, amount: int) -> type[NCDepositAction] | type[NCWithdrawalAction]: + if amount >= 0: + return NCDepositAction + else: + return NCWithdrawalAction diff --git a/tests/nanocontracts/fields/test_deque_field.py b/tests/nanocontracts/fields/test_deque_field.py new file mode 100644 index 000000000..327d50201 --- /dev/null +++ b/tests/nanocontracts/fields/test_deque_field.py @@ -0,0 +1,159 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from collections import deque +from typing import cast + +from hathor.nanocontracts import Blueprint, Context, public +from hathor.nanocontracts.catalog import NCBlueprintCatalog +from hathor.nanocontracts.nc_types import VarInt32NCType +from hathor.transaction import Block, Transaction +from tests import unittest +from tests.dag_builder.builder import TestDAGBuilder + +INT_NC_TYPE = VarInt32NCType() + + +def _test1(dq: deque[int]) -> None: + assert list(dq) == [] + dq.append(1) + dq.appendleft(2) + dq.extend([3, 4]) + dq.extendleft([5, 6]) + assert list(dq) == [6, 5, 2, 1, 3, 4] + assert dq.pop() == 4 + assert dq.popleft() == 6 + assert list(dq) == [5, 2, 1, 3] + assert len(dq) == 4 + dq[1] = 22 + dq[-2] = 11 + assert dq[1] == 22 + assert dq[-2] == 11 + assert list(dq) == [5, 22, 11, 3] + + +def _test2(dq: deque[int]) -> None: + assert list(dq) == [5, 22, 11, 3] + dq.reverse() + assert list(dq) == [3, 11, 22, 5] + dq.append(111) + dq.appendleft(222) + dq.extend([333, 444]) + dq.extendleft([555, 666]) + assert list(dq) == [666, 555, 222, 3, 11, 22, 5, 111, 333, 444] + assert dq.pop() == 444 + assert dq.popleft() == 666 + assert list(dq) == [555, 222, 3, 11, 22, 5, 111, 333] + assert len(dq) == 8 + dq[1] = 2222 + dq[-2] = 1111 + assert dq[1] == 2222 + assert dq[-2] == 1111 + assert list(dq) == [555, 2222, 3, 11, 22, 5, 1111, 333] + + +class BlueprintWithDeque(Blueprint): + dq: deque[int] + + @public + def initialize(self, ctx: Context) -> None: + _test1(self.dq) + + @public + def test(self, ctx: Context) -> None: + _test2(self.dq) + + +class 
BlueprintWithList(Blueprint): + dq: list[int] + + @public + def initialize(self, ctx: Context) -> None: + _test1(cast(deque, self.dq)) + + @public + def test(self, ctx: Context) -> None: + _test2(cast(deque, self.dq)) + + +class TestDequeField(unittest.TestCase): + def setUp(self) -> None: + super().setUp() + self.manager = self.create_peer('testnet') + self.bp_deque = b'1' * 32 + self.bp_list = b'2' * 32 + self.manager.tx_storage.nc_catalog = NCBlueprintCatalog({ + self.bp_deque: BlueprintWithDeque, + self.bp_list: BlueprintWithList, + }) + + def _test_deque_field(self, bp_id: bytes) -> None: + dag_builder = TestDAGBuilder.from_manager(self.manager) + artifacts = dag_builder.build_from_str(f''' + blockchain genesis b[1..12] + b10 < dummy + + nc1.nc_id = "{bp_id.hex()}" + nc1.nc_method = initialize() + + nc2.nc_id = nc1 + nc2.nc_method = test() + + nc1 <-- b11 + nc1 <-- nc2 <-- b12 + ''') + artifacts.propagate_with(self.manager) + + b11, b12 = artifacts.get_typed_vertices(['b11', 'b12'], Block) + nc1, nc2 = artifacts.get_typed_vertices(['nc1', 'nc2'], Transaction) + + assert b11.get_metadata().voided_by is None + assert nc1.get_metadata().voided_by is None + assert nc1.get_metadata().first_block == b11.hash + + b11_storage = self.manager.get_nc_storage(b11, nc1.hash) + + with self.assertRaises(KeyError): + b11_storage.get_obj(b'dq:\x7d', INT_NC_TYPE) + assert b11_storage.get_obj(b'dq:\x7e', INT_NC_TYPE) == 5 + assert b11_storage.get_obj(b'dq:\x7f', INT_NC_TYPE) == 22 + assert b11_storage.get_obj(b'dq:\x00', INT_NC_TYPE) == 11 + assert b11_storage.get_obj(b'dq:\x01', INT_NC_TYPE) == 3 + with self.assertRaises(KeyError): + b11_storage.get_obj(b'dq:\x02', INT_NC_TYPE) + + assert b12.get_metadata().voided_by is None + assert nc2.get_metadata().voided_by is None + assert nc2.get_metadata().first_block == b12.hash + + b12_storage = self.manager.get_nc_storage(b12, nc1.hash) + + with self.assertRaises(KeyError): + b12_storage.get_obj(b'dq:\x7b', INT_NC_TYPE) + assert 
b12_storage.get_obj(b'dq:\x7c', INT_NC_TYPE) == 333 + assert b12_storage.get_obj(b'dq:\x7d', INT_NC_TYPE) == 1111 + assert b12_storage.get_obj(b'dq:\x7e', INT_NC_TYPE) == 5 + assert b12_storage.get_obj(b'dq:\x7f', INT_NC_TYPE) == 22 + assert b12_storage.get_obj(b'dq:\x00', INT_NC_TYPE) == 11 + assert b12_storage.get_obj(b'dq:\x01', INT_NC_TYPE) == 3 + assert b12_storage.get_obj(b'dq:\x02', INT_NC_TYPE) == 2222 + assert b12_storage.get_obj(b'dq:\x03', INT_NC_TYPE) == 555 + with self.assertRaises(KeyError): + b12_storage.get_obj(b'dq:\x04', INT_NC_TYPE) + + def test_deque_field_with_deque(self) -> None: + self._test_deque_field(self.bp_deque) + + def test_deque_field_with_list(self) -> None: + self._test_deque_field(self.bp_list) diff --git a/tests/nanocontracts/fields/test_set_field.py b/tests/nanocontracts/fields/test_set_field.py new file mode 100644 index 000000000..f5c23ea11 --- /dev/null +++ b/tests/nanocontracts/fields/test_set_field.py @@ -0,0 +1,101 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from hathor.nanocontracts import Blueprint, Context, public +from hathor.nanocontracts.catalog import NCBlueprintCatalog +from hathor.nanocontracts.nc_types import VarInt32NCType +from hathor.transaction import Block, Transaction +from tests import unittest +from tests.dag_builder.builder import TestDAGBuilder + +INT_NC_TYPE = VarInt32NCType() + + +class MyBlueprint(Blueprint): + my_set: set[int] + + @public + def initialize(self, ctx: Context) -> None: + assert len(self.my_set) == 0 + self.my_set.add(1) + self.my_set.add(1) + self.my_set.update({1, 2, 3, 4, 5}) + assert len(self.my_set) == 5 + assert 1 in self.my_set + assert 5 in self.my_set + + @public + def test1(self, ctx: Context) -> None: + self.my_set.discard(1) + self.my_set.remove(5) + assert 1 not in self.my_set + assert 5 not in self.my_set + + +class TestDequeField(unittest.TestCase): + def setUp(self) -> None: + super().setUp() + self.manager = self.create_peer('testnet') + self.bp_id = b'x' * 32 + self.manager.tx_storage.nc_catalog = NCBlueprintCatalog({ + self.bp_id: MyBlueprint + }) + + def test_set_field(self) -> None: + dag_builder = TestDAGBuilder.from_manager(self.manager) + artifacts = dag_builder.build_from_str(f''' + blockchain genesis b[1..12] + b10 < dummy + + nc1.nc_id = "{self.bp_id.hex()}" + nc1.nc_method = initialize() + + nc2.nc_id = nc1 + nc2.nc_method = test1() + + nc1 <-- b11 + nc1 <-- nc2 <-- b12 + ''') + artifacts.propagate_with(self.manager) + + b11, b12 = artifacts.get_typed_vertices(['b11', 'b12'], Block) + nc1, nc2 = artifacts.get_typed_vertices(['nc1', 'nc2'], Transaction) + + assert b11.get_metadata().voided_by is None + assert nc1.get_metadata().voided_by is None + assert nc1.get_metadata().first_block == b11.hash + + b11_storage = self.manager.get_nc_storage(b11, nc1.hash) + + for i in range(1, 6): + assert b11_storage.get_obj(self._get_key(i), INT_NC_TYPE) == i + + for i in (0, 6): + assert not b11_storage.has_obj(self._get_key(i)) + + assert 
b12.get_metadata().voided_by is None + assert nc2.get_metadata().voided_by is None + assert nc2.get_metadata().first_block == b12.hash + + b12_storage = self.manager.get_nc_storage(b12, nc1.hash) + + for i in range(2, 5): + assert b12_storage.get_obj(self._get_key(i), INT_NC_TYPE) == i + + for i in (1, 5): + assert not b12_storage.has_obj(self._get_key(i)) + + @staticmethod + def _get_key(n: int) -> bytes: + return 'my_set:'.encode() + INT_NC_TYPE.to_bytes(n) diff --git a/tests/nanocontracts/fields/test_storage_deque.py b/tests/nanocontracts/fields/test_storage_deque.py new file mode 100644 index 000000000..17e4da952 --- /dev/null +++ b/tests/nanocontracts/fields/test_storage_deque.py @@ -0,0 +1,372 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from collections import deque + +import pytest + +from hathor.nanocontracts.fields.deque_field import DequeStorageContainer, _DequeMetadata +from hathor.nanocontracts.nc_types import Int32NCType, StrNCType +from tests.nanocontracts.fields.utils import MockNCStorage + +INT_NC_TYPE = Int32NCType() +STR_NC_TYPE = StrNCType() + + +def test_basic() -> None: + storage = MockNCStorage() + dq = DequeStorageContainer(storage, 'dq', INT_NC_TYPE) + + assert storage.store == {} + assert list(dq) == [] + assert dq.maxlen is None + + +def test_append() -> None: + storage = MockNCStorage() + dq = DequeStorageContainer(storage, 'dq', STR_NC_TYPE) + + dq.append('a') + dq.append('b') + + assert storage.store == { + b'dq:\x00': 'a', + b'dq:\x01': 'b', + b'dq:__metadata__': _DequeMetadata(first_index=0, length=2, reversed=False), + } + assert list(dq) == ['a', 'b'] + + dq.reverse() + dq.append('c') + + assert storage.store == { + b'dq:\x7f': 'c', + b'dq:\x00': 'a', + b'dq:\x01': 'b', + b'dq:__metadata__': _DequeMetadata(first_index=-1, length=3, reversed=True), + } + assert list(dq) == ['b', 'a', 'c'] + + +def test_appendleft() -> None: + storage = MockNCStorage() + dq = DequeStorageContainer(storage, 'dq', STR_NC_TYPE) + + dq.appendleft('a') + dq.appendleft('b') + + assert storage.store == { + b'dq:\x7e': 'b', + b'dq:\x7f': 'a', + b'dq:__metadata__': _DequeMetadata(first_index=-2, length=2, reversed=False), + } + assert list(dq) == ['b', 'a'] + + dq.reverse() + dq.appendleft('c') + + assert storage.store == { + b'dq:\x7e': 'b', + b'dq:\x7f': 'a', + b'dq:\x00': 'c', + b'dq:__metadata__': _DequeMetadata(first_index=-2, length=3, reversed=True), + } + assert list(dq) == ['c', 'a', 'b'] + + +def test_extend() -> None: + storage = MockNCStorage() + dq = DequeStorageContainer(storage, 'dq', INT_NC_TYPE) + + dq.extend([1, 2, 3]) + + assert storage.store == { + b'dq:\x00': 1, + b'dq:\x01': 2, + b'dq:\x02': 3, + b'dq:__metadata__': _DequeMetadata(first_index=0, length=3, reversed=False), + 
} + assert list(dq) == [1, 2, 3] + + dq.reverse() + dq.extend([4, 5]) + + assert storage.store == { + b'dq:\x7e': 5, + b'dq:\x7f': 4, + b'dq:\x00': 1, + b'dq:\x01': 2, + b'dq:\x02': 3, + b'dq:__metadata__': _DequeMetadata(first_index=-2, length=5, reversed=True), + } + assert list(dq) == [3, 2, 1, 4, 5] + + py_dq: deque[int] = deque() + py_dq.extend([1, 2, 3]) + py_dq.reverse() + py_dq.extend([4, 5]) + assert list(py_dq) == list(dq) + + +def test_extendleft() -> None: + storage = MockNCStorage() + dq = DequeStorageContainer(storage, 'dq', INT_NC_TYPE) + + dq.extendleft([1, 2, 3]) + + assert storage.store == { + b'dq:\x7d': 3, + b'dq:\x7e': 2, + b'dq:\x7f': 1, + b'dq:__metadata__': _DequeMetadata(first_index=-3, length=3, reversed=False), + } + assert list(dq) == [3, 2, 1] + + dq.reverse() + dq.extendleft([4, 5]) + + assert storage.store == { + b'dq:\x7d': 3, + b'dq:\x7e': 2, + b'dq:\x7f': 1, + b'dq:\x00': 4, + b'dq:\x01': 5, + b'dq:__metadata__': _DequeMetadata(first_index=-3, length=5, reversed=True), + } + assert list(dq) == [5, 4, 1, 2, 3] + + py_dq: deque[int] = deque() + py_dq.extendleft([1, 2, 3]) + py_dq.reverse() + py_dq.extendleft([4, 5]) + assert list(py_dq) == list(dq) + + +def test_pop() -> None: + storage = MockNCStorage() + dq = DequeStorageContainer(storage, 'dq', INT_NC_TYPE) + dq.extend([1, 2, 3, 4]) + + assert dq.pop() == 4 + assert storage.store == { + b'dq:\x00': 1, + b'dq:\x01': 2, + b'dq:\x02': 3, + b'dq:__metadata__': _DequeMetadata(first_index=0, length=3, reversed=False), + } + + assert dq.pop() == 3 + assert storage.store == { + b'dq:\x00': 1, + b'dq:\x01': 2, + b'dq:__metadata__': _DequeMetadata(first_index=0, length=2, reversed=False), + } + + dq.reverse() + + assert dq.pop() == 1 + assert storage.store == { + b'dq:\x01': 2, + b'dq:__metadata__': _DequeMetadata(first_index=1, length=1, reversed=True), + } + + # popping the last element resets the deque + assert dq.pop() == 2 + assert storage.store == {} + + with 
pytest.raises(IndexError): + dq.pop() + + +def test_popleft() -> None: + storage = MockNCStorage() + dq = DequeStorageContainer(storage, 'dq', INT_NC_TYPE) + dq.extend([1, 2, 3, 4]) + + assert dq.popleft() == 1 + assert storage.store == { + b'dq:\x01': 2, + b'dq:\x02': 3, + b'dq:\x03': 4, + b'dq:__metadata__': _DequeMetadata(first_index=1, length=3, reversed=False), + } + + assert dq.popleft() == 2 + assert storage.store == { + b'dq:\x02': 3, + b'dq:\x03': 4, + b'dq:__metadata__': _DequeMetadata(first_index=2, length=2, reversed=False), + } + + dq.reverse() + + assert dq.popleft() == 4 + assert storage.store == { + b'dq:\x02': 3, + b'dq:__metadata__': _DequeMetadata(first_index=2, length=1, reversed=True), + } + + # popping the last element resets the deque + assert dq.popleft() == 3 + assert storage.store == {} + + with pytest.raises(IndexError): + dq.popleft() + + +def test_reverse() -> None: + storage = MockNCStorage() + + dq = DequeStorageContainer(storage, 'dq', STR_NC_TYPE) + dq.extend(['a', 'b', 'c']) + + assert storage.store == { + b'dq:\x00': 'a', + b'dq:\x01': 'b', + b'dq:\x02': 'c', + b'dq:__metadata__': _DequeMetadata(first_index=0, length=3, reversed=False), + } + assert list(dq) == ['a', 'b', 'c'] + + dq.reverse() + + assert storage.store == { + b'dq:\x00': 'a', + b'dq:\x01': 'b', + b'dq:\x02': 'c', + b'dq:__metadata__': _DequeMetadata(first_index=0, length=3, reversed=True), + } + assert list(dq) == ['c', 'b', 'a'] + + +def test_indexing() -> None: + storage = MockNCStorage() + dq = DequeStorageContainer(storage, 'dq', STR_NC_TYPE) + + dq.extend(['a', 'b', 'c', 'd']) + + assert storage.store == { + b'dq:\x00': 'a', + b'dq:\x01': 'b', + b'dq:\x02': 'c', + b'dq:\x03': 'd', + b'dq:__metadata__': _DequeMetadata(first_index=0, length=4, reversed=False), + } + assert dq[0] == 'a' + assert dq[1] == 'b' + assert dq[2] == 'c' + assert dq[3] == 'd' + + with pytest.raises(IndexError): + _ = dq[4] + + assert dq[-1] == 'd' + assert dq[-2] == 'c' + assert dq[-3] 
== 'b' + assert dq[-4] == 'a' + + with pytest.raises(IndexError): + _ = dq[-5] + + dq[1] = 'changed1' + dq[-2] = 'changed2' + + with pytest.raises(IndexError): + dq[4] = 'error' + + with pytest.raises(IndexError): + dq[-5] = 'error' + + assert storage.store == { + b'dq:\x00': 'a', + b'dq:\x01': 'changed1', + b'dq:\x02': 'changed2', + b'dq:\x03': 'd', + b'dq:__metadata__': _DequeMetadata(first_index=0, length=4, reversed=False), + } + assert dq[1] == 'changed1' + assert dq[-2] == 'changed2' + + with pytest.raises(IndexError): + dq[4] = 'error' + + with pytest.raises(IndexError): + dq[-5] = 'error' + + +def test_indexing_reversed() -> None: + storage = MockNCStorage() + dq = DequeStorageContainer(storage, 'dq', STR_NC_TYPE) + + dq.extend(['a', 'b', 'c', 'd']) + dq.reverse() + + assert storage.store == { + b'dq:\x00': 'a', + b'dq:\x01': 'b', + b'dq:\x02': 'c', + b'dq:\x03': 'd', + b'dq:__metadata__': _DequeMetadata(first_index=0, length=4, reversed=True), + } + assert dq[0] == 'd' + assert dq[1] == 'c' + assert dq[2] == 'b' + assert dq[3] == 'a' + + with pytest.raises(IndexError): + _ = dq[4] + + assert dq[-1] == 'a' + assert dq[-2] == 'b' + assert dq[-3] == 'c' + assert dq[-4] == 'd' + + with pytest.raises(IndexError): + _ = dq[-5] + + dq[1] = 'changed1' + dq[-2] = 'changed2' + + assert storage.store == { + b'dq:\x00': 'a', + b'dq:\x01': 'changed2', + b'dq:\x02': 'changed1', + b'dq:\x03': 'd', + b'dq:__metadata__': _DequeMetadata(first_index=0, length=4, reversed=True), + } + assert dq[1] == 'changed1' + assert dq[-2] == 'changed2' + + +def test_len() -> None: + storage = MockNCStorage() + dq = DequeStorageContainer(storage, 'dq', STR_NC_TYPE) + assert len(dq) == 0 + + dq.append('a') + assert len(dq) == 1 + + dq.append('b') + assert len(dq) == 2 + + dq.reverse() + assert len(dq) == 2 + + +def test_reverse_empty() -> None: + storage = MockNCStorage() + dq = DequeStorageContainer(storage, 'dq', INT_NC_TYPE) + assert list(dq) == [] + dq.reverse() + assert list(dq) == [] 
diff --git a/tests/nanocontracts/fields/test_storage_set.py b/tests/nanocontracts/fields/test_storage_set.py new file mode 100644 index 000000000..54b90dcb4 --- /dev/null +++ b/tests/nanocontracts/fields/test_storage_set.py @@ -0,0 +1,113 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Any + +import pytest + +from hathor.nanocontracts.fields.set_field import SetStorageContainer +from hathor.nanocontracts.nc_types import Int32NCType +from tests.nanocontracts.fields.utils import MockNCStorage + +_INT_NC_TYPE = Int32NCType() + + +def test_basic() -> None: + storage = MockNCStorage() + my_set = SetStorageContainer(storage, 'my_set', _INT_NC_TYPE) + + assert len(my_set) == 0 + assert storage.store == {} + + +def test_add_remove_discard() -> None: + storage = MockNCStorage() + my_set = SetStorageContainer(storage, 'my_set', _INT_NC_TYPE) + + my_set.add(1) + my_set.add(1) + my_set.add(2) + assert _get_values(storage) == {1, 2} + assert len(my_set) == 2 + + my_set.remove(1) + assert _get_values(storage) == {2} + assert len(my_set) == 1 + + my_set.discard(2) + assert _get_values(storage) == set() + assert len(my_set) == 0 + + my_set.discard(1) + with pytest.raises(KeyError): + my_set.remove(1) + + +def test_updates_and_contains() -> None: + storage = MockNCStorage() + my_set = SetStorageContainer(storage, 'my_set', _INT_NC_TYPE) + + my_set.update({1, 2, 3}, [2, 3, 4]) + assert _get_values(storage) == {1, 2, 3, 4} + assert 
len(my_set) == 4 + assert 0 not in my_set + assert 1 in my_set + assert 2 in my_set + assert 3 in my_set + assert 4 in my_set + assert 5 not in my_set + + my_set.difference_update({1, 3}, [4]) + assert _get_values(storage) == {2} + assert len(my_set) == 1 + + +def test_isdisjoint() -> None: + storage = MockNCStorage() + my_set = SetStorageContainer(storage, 'my_set', _INT_NC_TYPE) + my_set.update({1, 2, 3}) + + assert my_set.isdisjoint(set()) + assert my_set.isdisjoint({4, 5, 6}) + assert my_set.isdisjoint({0, 10}) + assert not my_set.isdisjoint({0, 1, 10, 20}) + assert not my_set.isdisjoint({3}) + + +def issuperset() -> None: + storage = MockNCStorage() + my_set = SetStorageContainer(storage, 'my_set', _INT_NC_TYPE) + my_set.update({1, 2, 3}) + + assert my_set.issuperset({}) + assert my_set.issuperset({1}) + assert my_set.issuperset({1, 2}) + assert my_set.issuperset({1, 2, 3}) + assert not my_set.issuperset({1, 2, 3, 4}) + + +def intersection() -> None: + storage = MockNCStorage() + my_set = SetStorageContainer(storage, 'my_set', _INT_NC_TYPE) + my_set.update({1, 2, 3}) + + assert my_set.intersection(set()) == set() + assert my_set.intersection({1}) == {1} + assert my_set.intersection({1, 2}) == {1, 2} + assert my_set.intersection({1, 2, 3}) == {1, 2, 3} + assert my_set.intersection({1, 2, 3, 4}) == {1, 2, 3} + + +def _get_values(storage: MockNCStorage) -> set[Any]: + return set(value for key, value in storage.store.items() if key != b'my_set:__length__') diff --git a/tests/nanocontracts/on_chain_blueprints/bomb.zlib b/tests/nanocontracts/on_chain_blueprints/bomb.zlib new file mode 100644 index 000000000..5fe4707b4 Binary files /dev/null and b/tests/nanocontracts/on_chain_blueprints/bomb.zlib differ diff --git a/tests/nanocontracts/on_chain_blueprints/test_bet.py b/tests/nanocontracts/on_chain_blueprints/test_bet.py new file mode 100644 index 000000000..b8bf3908a --- /dev/null +++ b/tests/nanocontracts/on_chain_blueprints/test_bet.py @@ -0,0 +1,300 @@ +import os 
+import re +from typing import Any, NamedTuple, Optional + +from hathor.conf import HathorSettings +from hathor.crypto.util import decode_address, get_address_b58_from_public_key_bytes +from hathor.nanocontracts import OnChainBlueprint +from hathor.nanocontracts.context import Context +from hathor.nanocontracts.nc_types import NCType, make_nc_type_for_type +from hathor.nanocontracts.types import ( + NC_INITIALIZE_METHOD, + Address, + Amount, + ContractId, + NCDepositAction, + NCWithdrawalAction, + SignedData, + Timestamp, + TokenUid, + TxOutputScript, + VertexId, +) +from hathor.nanocontracts.utils import load_builtin_blueprint_for_ocb, sign_pycoin +from hathor.simulator.utils import add_new_blocks +from hathor.transaction import BaseTransaction, Transaction +from hathor.transaction.scripts import P2PKH +from hathor.util import initialize_hd_wallet, not_none +from hathor.wallet import KeyPair +from tests import unittest + +from ...utils import DEFAULT_WORDS +from .. import test_blueprints +from .utils import get_ocb_private_key + +settings = HathorSettings() + +ON_CHAIN_BET_NC_CODE: str = load_builtin_blueprint_for_ocb('bet.py', 'Bet', test_blueprints) +TX_OUTPUT_SCRIPT_NC_TYPE = make_nc_type_for_type(TxOutputScript) +RESULT_NC_TYPE: NCType[str | None] = make_nc_type_for_type(str | None) # type: ignore[arg-type] +TIMESTAMP_NC_TYPE = make_nc_type_for_type(Timestamp) +TOKEN_UID_NC_TYPE = make_nc_type_for_type(TokenUid) + + +class BetInfo(NamedTuple): + key: KeyPair + address: Address + amount: Amount + score: str + + +class OnChainBetBlueprintTestCase(unittest.TestCase): + use_memory_storage = True + + def setUp(self) -> None: + super().setUp() + self.manager = self.create_peer('testnet') + self.wallet = initialize_hd_wallet(DEFAULT_WORDS) + self.token_uid = TokenUid(settings.HATHOR_TOKEN_UID) + self.initialize_contract() # will set self.nc_id, self.runner, self.nc_storage + + def _get_any_tx(self) -> BaseTransaction: + genesis = 
self.manager.tx_storage.get_all_genesis() + tx = [t for t in genesis if t.is_transaction][0] + return tx + + def _get_any_address(self) -> tuple[Address, KeyPair]: + password = os.urandom(12) + key = KeyPair.create(password) + address_b58 = key.address + address_bytes = Address(decode_address(not_none(address_b58))) + return address_bytes, key + + def get_current_timestamp(self) -> int: + return int(self.clock.seconds()) + + def _make_a_bet(self, amount: int, score: str, *, timestamp: Optional[int] = None) -> BetInfo: + (address_bytes, key) = self._get_any_address() + tx = self._get_any_tx() + action = NCDepositAction(token_uid=self.token_uid, amount=amount) + if timestamp is None: + timestamp = self.get_current_timestamp() + context = Context([action], tx, address_bytes, timestamp=timestamp) + self.runner.call_public_method(self.nc_id, 'bet', context, address_bytes, score) + return BetInfo(key=key, address=Address(address_bytes), amount=Amount(amount), score=score) + + def _set_result(self, result: str, oracle_key: Optional[KeyPair] = None) -> None: + signed_result = SignedData[str](result, b'') + + if oracle_key is None: + oracle_key = self.oracle_key + + result_bytes = signed_result.get_data_bytes(self.nc_id) + signed_result.script_input = oracle_key.p2pkh_create_input_data(b'123', result_bytes) + + tx = self._get_any_tx() + context = Context([], tx, Address(b''), timestamp=self.get_current_timestamp()) + self.runner.call_public_method(self.nc_id, 'set_result', context, signed_result) + final_result = self.nc_storage.get_obj(b'final_result', RESULT_NC_TYPE) + self.assertEqual(final_result, result) + + def _withdraw(self, address: Address, amount: int) -> None: + tx = self._get_any_tx() + action = NCWithdrawalAction(token_uid=self.token_uid, amount=amount) + context = Context([action], tx, address, timestamp=self.get_current_timestamp()) + self.runner.call_public_method(self.nc_id, 'withdraw', context) + + def _create_on_chain_blueprint(self, nc_code: str) -> 
OnChainBlueprint: + from hathor.nanocontracts.on_chain_blueprint import Code + code = Code.from_python_code(nc_code, self._settings) + timestamp = self.manager.tx_storage.latest_timestamp + 1 + parents = self.manager.get_new_tx_parents(timestamp) + blueprint = OnChainBlueprint( + weight=1, + inputs=[], + outputs=[], + parents=parents, + storage=self.manager.tx_storage, + timestamp=timestamp, + code=code, + ) + blueprint.weight = self.manager.daa.minimum_tx_weight(blueprint) + blueprint.sign(get_ocb_private_key()) + self.manager.cpu_mining_service.resolve(blueprint) + self.manager.reactor.advance(2) + return blueprint + + def _gen_nc_initialize_tx(self, blueprint: OnChainBlueprint, nc_args: list[Any]) -> Transaction: + method_parser = blueprint.get_method(NC_INITIALIZE_METHOD) + timestamp = int(self.manager.reactor.seconds()) + parents = self.manager.get_new_tx_parents() + + nc = Transaction(timestamp=timestamp, parents=parents) + + nc_id = blueprint.blueprint_id() + nc_method = NC_INITIALIZE_METHOD + nc_args_bytes = method_parser.serialize_args_bytes(nc_args) + + # sign + address = self.wallet.get_unused_address() + private_key = self.wallet.get_private_key(address) + + from hathor.transaction.headers import NanoHeader + nano_header = NanoHeader( + tx=nc, + nc_seqnum=1, + nc_id=nc_id, + nc_method=nc_method, + nc_args_bytes=nc_args_bytes, + nc_address=b'', + nc_script=b'', + nc_actions=[], + ) + nc.headers.append(nano_header) + + sign_pycoin(nano_header, private_key) + + # mine + nc.weight = self.manager.daa.minimum_tx_weight(nc) + self.manager.cpu_mining_service.resolve(nc) + + # advance + self.manager.reactor.advance(2) + return nc + + def initialize_contract(self) -> None: + # create on-chain Bet nanocontract + blueprint = self._create_on_chain_blueprint(ON_CHAIN_BET_NC_CODE) + + related_addresses = set(blueprint.get_related_addresses()) + address = get_address_b58_from_public_key_bytes(blueprint.nc_pubkey) + self.assertIn(address, related_addresses) + + assert 
self.manager.vertex_handler.on_new_relayed_vertex(blueprint) + add_new_blocks(self.manager, 1, advance_clock=30) # confirm the on-chain blueprint vertex + assert blueprint.get_metadata().first_block is not None + + self.oracle_key = KeyPair.create(b'123') + assert self.oracle_key.address is not None + self.oracle_script = P2PKH(self.oracle_key.address).get_script() + self.date_last_bet = self.get_current_timestamp() + 3600 * 24 + + # initialize an on-chain Bet nanocontract + nc_init_tx = self._gen_nc_initialize_tx(blueprint, [self.oracle_script, self.token_uid, self.date_last_bet]) + assert self.manager.vertex_handler.on_new_relayed_vertex(nc_init_tx) + block, = add_new_blocks(self.manager, 1, advance_clock=30) # confirm the initialization nc transaction + assert nc_init_tx.get_metadata().first_block is not None + + # set expected self objects: + self.nc_id = ContractId(VertexId(nc_init_tx.hash)) + self.runner = self.manager.get_nc_runner(block) + self.nc_storage = self.runner.get_storage(self.nc_id) + + def test_blueprint_initialization(self) -> None: + # if initialization was correct we should be able to observe these in the nc_storage: + self.assertEqual(self.nc_storage.get_obj(b'oracle_script', TX_OUTPUT_SCRIPT_NC_TYPE), self.oracle_script) + self.assertEqual(self.nc_storage.get_obj(b'token_uid', TOKEN_UID_NC_TYPE), self.token_uid) + self.assertEqual(self.nc_storage.get_obj(b'date_last_bet', TIMESTAMP_NC_TYPE), self.date_last_bet) + + def test_basic_flow(self) -> None: + runner = self.runner + + tx = self._get_any_tx() + + ### + # Make some bets. + ### + self._make_a_bet(100, '1x1') + self._make_a_bet(200, '1x1') + self._make_a_bet(300, '1x1') + bet1 = self._make_a_bet(500, '2x2') + + ### + # Set the final result. + ### + self._set_result('2x2') + + ### + # Single winner withdraws all funds. 
+ ### + self.assertEqual(1100, runner.call_view_method(self.nc_id, 'get_max_withdrawal', bet1.address)) + + self._withdraw(bet1.address, 100) + self.assertEqual(1000, runner.call_view_method(self.nc_id, 'get_max_withdrawal', bet1.address)) + + self._withdraw(bet1.address, 1000) + self.assertEqual(0, runner.call_view_method(self.nc_id, 'get_max_withdrawal', bet1.address)) + + # Out of funds! Any withdrawal must fail from now on... + amount = 1 + action = NCWithdrawalAction(token_uid=self.token_uid, amount=amount) + context = Context([action], tx, bet1.address, timestamp=self.get_current_timestamp()) + with self.assertNCFail('InsufficientBalance', 'withdrawal amount is greater than available (max: 0)'): + runner.call_public_method(self.nc_id, 'withdraw', context) + + def test_make_a_bet_with_withdrawal(self) -> None: + self._make_a_bet(100, '1x1') + + (address_bytes, _) = self._get_any_address() + tx = self._get_any_tx() + action = NCWithdrawalAction(token_uid=self.token_uid, amount=1) + context = Context([action], tx, address_bytes, timestamp=self.get_current_timestamp()) + score = '1x1' + with self.assertNCFail('NCForbiddenAction', 'action WITHDRAWAL is forbidden on method `bet`'): + self.runner.call_public_method(self.nc_id, 'bet', context, address_bytes, score) + + def test_make_a_bet_after_result(self) -> None: + self._make_a_bet(100, '1x1') + self._set_result('2x2') + with self.assertNCFail('ResultAlreadySet', ''): + self._make_a_bet(100, '1x1') + + def test_make_a_bet_after_date_last_bet(self) -> None: + with self.assertNCFail('TooLate', re.compile(r'cannot place bets after \d+')): + self._make_a_bet(100, '1x1', timestamp=self.date_last_bet + 1) + + def test_set_results_two_times(self) -> None: + self._set_result('2x2') + with self.assertNCFail('ResultAlreadySet', ''): + self._set_result('5x1') + + def test_set_results_wrong_signature(self) -> None: + wrong_oracle_key = KeyPair.create(b'123') + with self.assertNCFail('InvalidOracleSignature', ''): + 
self._set_result('3x2', oracle_key=wrong_oracle_key) + + def test_withdraw_before_result(self) -> None: + bet1 = self._make_a_bet(100, '1x1') + with self.assertNCFail('ResultNotAvailable', ''): + self._withdraw(bet1.address, 100) + + def test_withdraw_with_deposits(self) -> None: + (address_bytes, _) = self._get_any_address() + tx = self._get_any_tx() + action = NCDepositAction(token_uid=self.token_uid, amount=1) + context = Context([action], tx, address_bytes, timestamp=self.get_current_timestamp()) + with self.assertNCFail('NCForbiddenAction', 'action DEPOSIT is forbidden on method `withdraw`'): + self.runner.call_public_method(self.nc_id, 'withdraw', context) + + def test_make_a_bet_wrong_token(self) -> None: + + (address_bytes, _) = self._get_any_address() + tx = self._get_any_tx() + token_uid = TokenUid(b'xxx') + self.assertNotEqual(token_uid, self.token_uid) + action = NCDepositAction(token_uid=token_uid, amount=1) + context = Context([action], tx, address_bytes, timestamp=self.get_current_timestamp()) + score = '1x1' + with self.assertNCFail('InvalidToken', 'token different from 00'): + self.runner.call_public_method(self.nc_id, 'bet', context, address_bytes, score) + + def test_withdraw_wrong_token(self) -> None: + bet1 = self._make_a_bet(100, '1x1') + + tx = self._get_any_tx() + token_uid = TokenUid(b'xxx') + self.assertNotEqual(token_uid, self.token_uid) + action = NCWithdrawalAction(token_uid=token_uid, amount=1) + context = Context([action], tx, bet1.address, timestamp=self.get_current_timestamp()) + with self.assertNCFail('InvalidToken', 'token different from 00'): + self.runner.call_public_method(self.nc_id, 'withdraw', context) diff --git a/tests/nanocontracts/on_chain_blueprints/test_custom_builtins.py b/tests/nanocontracts/on_chain_blueprints/test_custom_builtins.py new file mode 100644 index 000000000..02bbda963 --- /dev/null +++ b/tests/nanocontracts/on_chain_blueprints/test_custom_builtins.py @@ -0,0 +1,198 @@ +import unittest +from builtins 
import range as builtin_range + +from hathor.nanocontracts.custom_builtins import custom_range + + +class TestCustomRange(unittest.TestCase): + def compare_ranges(self, custom, builtin): + self.assertEqual(list(custom), list(builtin)) + self.assertEqual(len(custom), len(builtin)) + self.assertEqual(custom.start, builtin.start) + self.assertEqual(custom.stop, builtin.stop) + self.assertEqual(custom.step, builtin.step) + + def test_single_argument(self): + custom = custom_range(5) + builtin = builtin_range(5) + self.compare_ranges(custom, builtin) + + def test_two_arguments(self): + custom = custom_range(1, 5) + builtin = builtin_range(1, 5) + self.compare_ranges(custom, builtin) + + def test_three_arguments(self): + custom = custom_range(1, 10, 2) + builtin = builtin_range(1, 10, 2) + self.compare_ranges(custom, builtin) + + def test_negative_step(self): + custom = custom_range(10, 1, -2) + builtin = builtin_range(10, 1, -2) + self.compare_ranges(custom, builtin) + + def test_empty_range(self): + cases = [(5, 5), (5, 5, -1), (5, 10, -1)] + for args in cases: + custom = custom_range(*args) + builtin = builtin_range(*args) + self.compare_ranges(custom, builtin) + + def test_len(self): + for args in [(5,), (1, 5), (1, 10, 2), (10, 1, -2)]: + custom = custom_range(*args) + builtin = builtin_range(*args) + self.assertEqual(len(custom), len(builtin)) + + def test_eq(self): + self.assertEqual(custom_range(5), custom_range(0, 5, 1)) + self.assertNotEqual(custom_range(5), custom_range(1, 5)) + self.assertNotEqual(custom_range(1, 10, 2), custom_range(1, 10, 3)) + + def test_contains(self): + custom = custom_range(1, 10, 2) + builtin = builtin_range(1, 10, 2) + for val in [3, 4, 9, 10]: + self.assertEqual(val in custom, val in builtin) + + def test_index(self): + custom = custom_range(1, 10, 2) + builtin = builtin_range(1, 10, 2) + for val in [3, 9]: + self.assertEqual(custom.index(val), builtin.index(val)) + with self.assertRaises(ValueError): + custom.index(4) + with 
self.assertRaises(ValueError): + builtin.index(4) + + def test_count(self): + custom = custom_range(1, 10, 2) + builtin = builtin_range(1, 10, 2) + for val in [3, 4, 9]: + self.assertEqual(custom.count(val), builtin.count(val)) + + def test_getitem(self): + custom = custom_range(1, 10, 2) + builtin = builtin_range(1, 10, 2) + for idx in [0, 1, -1]: + self.assertEqual(custom[idx], builtin[idx]) + with self.assertRaises(IndexError): + _ = custom[10] + with self.assertRaises(IndexError): + _ = builtin[10] + + def test_slice_getitem(self): + custom = custom_range(1, 10, 2) + builtin = builtin_range(1, 10, 2) + slices = [slice(1, 4), slice(None, None, 2), slice(None, None, -1)] + for sl in slices: + self.compare_ranges(custom[sl], builtin[sl]) + + def test_iter(self): + custom = custom_range(1, 5) + builtin = builtin_range(1, 5) + self.assertEqual(list(iter(custom)), list(iter(builtin))) + + def test_reversed(self): + custom = custom_range(1, 10, 2) + builtin = builtin_range(1, 10, 2) + self.assertEqual(list(reversed(custom)), list(reversed(builtin))) + + def test_invalid_arguments(self): + invalid_args = [(1.5,), (1, '10'), (1, 10, '2')] + for args in invalid_args: + with self.assertRaises(TypeError): + custom_range(*args) + with self.assertRaises(TypeError): + builtin_range(*args) + + def test_large_range(self): + # Very large range + custom = custom_range(0, 10**6, 2) + builtin = builtin_range(0, 10**6, 2) + self.assertEqual(len(custom), len(builtin)) + self.assertEqual(custom[-1], builtin[-1]) + + def test_large_negative_step(self): + # Large negative step + custom = custom_range(10**6, 0, -2) + builtin = builtin_range(10**6, 0, -2) + self.assertEqual(len(custom), len(builtin)) + self.assertEqual(custom[-1], builtin[-1]) + + def test_single_element_range(self): + # Single element ranges + custom = custom_range(5, 6) + builtin = builtin_range(5, 6) + self.assertEqual(list(custom), list(builtin)) + self.assertEqual(len(custom), len(builtin)) + + def 
test_single_element_negative_step(self): + # Single element with negative step + custom = custom_range(6, 5, -1) + builtin = builtin_range(6, 5, -1) + self.assertEqual(list(custom), list(builtin)) + self.assertEqual(len(custom), len(builtin)) + + def test_start_stop_equal(self): + # Start and stop are the same + custom = custom_range(5, 5) + builtin = builtin_range(5, 5) + self.assertEqual(list(custom), list(builtin)) + self.assertEqual(len(custom), len(builtin)) + + def test_step_larger_than_range(self): + # Step size larger than the range + custom = custom_range(1, 5, 10) + builtin = builtin_range(1, 5, 10) + self.assertEqual(list(custom), list(builtin)) + self.assertEqual(len(custom), len(builtin)) + + def test_reverse_single_step(self): + # Negative step with start and stop reversed by one step + custom = custom_range(1, -1, -1) + builtin = builtin_range(1, -1, -1) + self.assertEqual(list(custom), list(builtin)) + self.assertEqual(len(custom), len(builtin)) + + def test_index_out_of_bounds(self): + # Check handling of out-of-bounds indices + custom = custom_range(1, 10, 2) + with self.assertRaises(IndexError): + _ = custom[100] + with self.assertRaises(IndexError): + _ = custom[-100] + + def test_slice_with_large_step(self): + # Slicing with a large step + custom = custom_range(0, 100) + builtin = builtin_range(0, 100) + self.assertEqual(list(custom[::25]), list(builtin[::25])) + + def test_slice_out_of_bounds(self): + # Slicing out of bounds + custom = custom_range(0, 10) + builtin = builtin_range(0, 10) + self.assertEqual(list(custom[10:20]), list(builtin[10:20])) + self.assertEqual(list(custom[-20:-10]), list(builtin[-20:-10])) + + def test_reverse_entire_range(self): + # Reverse the entire range + custom = custom_range(1, 10) + builtin = builtin_range(1, 10) + self.assertEqual(list(reversed(custom)), list(reversed(builtin))) + + def test_step_one(self): + # Step of 1, which should produce a range identical to start-stop + custom = custom_range(1, 10, 1) + 
builtin = builtin_range(1, 10, 1) + self.assertEqual(list(custom), list(builtin)) + self.assertEqual(len(custom), len(builtin)) + + def test_zero_length_range(self): + # A range with zero length due to the starting conditions + custom = custom_range(10, 0) + builtin = builtin_range(10, 0) + self.assertEqual(list(custom), list(builtin)) + self.assertEqual(len(custom), len(builtin)) diff --git a/tests/nanocontracts/on_chain_blueprints/test_script_restrictions.py b/tests/nanocontracts/on_chain_blueprints/test_script_restrictions.py new file mode 100644 index 000000000..42b9a13f4 --- /dev/null +++ b/tests/nanocontracts/on_chain_blueprints/test_script_restrictions.py @@ -0,0 +1,219 @@ +import os + +from hathor.exception import InvalidNewTransaction +from hathor.nanocontracts import OnChainBlueprint +from hathor.nanocontracts.exception import OCBInvalidScript +from tests import unittest +from tests.nanocontracts.on_chain_blueprints.utils import get_ocb_private_key + + +def _load_file(filename: str) -> bytes: + cur_dir = os.path.dirname(__file__) + filepath = os.path.join(cur_dir, filename) + content = bytearray() + with open(filepath, 'rb') as nc_file: + for line in nc_file.readlines(): + content.extend(line) + return bytes(content) + + +ZLIB_BOMB: bytes = _load_file('bomb.zlib') + + +class OnChainBlueprintScriptTestCase(unittest.TestCase): + use_memory_storage = True + + def setUp(self): + super().setUp() + self.manager = self.create_peer('testnet') + self.verification_service = self.manager.verification_service + + def _ocb_mine(self, blueprint: OnChainBlueprint) -> None: + self.manager.cpu_mining_service.resolve(blueprint) + self.manager.reactor.advance(2) + + def _create_on_chain_blueprint(self, nc_code: str) -> OnChainBlueprint: + from hathor.nanocontracts.on_chain_blueprint import Code + + code = Code.from_python_code(nc_code, self._settings) + timestamp = self.manager.tx_storage.latest_timestamp + 1 + parents = self.manager.get_new_tx_parents(timestamp) + 
blueprint = OnChainBlueprint( + weight=1, + inputs=[], + outputs=[], + parents=parents, + storage=self.manager.tx_storage, + timestamp=timestamp, + code=code, + ) + blueprint.weight = self.manager.daa.minimum_tx_weight(blueprint) + blueprint.sign(get_ocb_private_key()) + self._ocb_mine(blueprint) + return blueprint + + def _test_forbid_syntax(self, code: str, err_msg: str) -> None: + blueprint = self._create_on_chain_blueprint(code) + with self.assertRaises(InvalidNewTransaction) as cm: + self.manager.vertex_handler.on_new_relayed_vertex(blueprint) + assert isinstance(cm.exception.__cause__, OCBInvalidScript) + assert isinstance(cm.exception.__cause__.__cause__, SyntaxError) + assert cm.exception.args[0] == 'full validation failed: forbidden syntax' + assert cm.exception.__cause__.__cause__.args[0] == err_msg + + def test_forbid_import(self) -> None: + self._test_forbid_syntax( + 'import os', + 'Import statements are not allowed.', + ) + + def test_forbid_import_from(self) -> None: + self._test_forbid_syntax( + 'from os import path', + 'Importing from "os" is not allowed.', + ) + # XXX: only math.ceil and math.floor are currently allowed, log should error + self._test_forbid_syntax( + 'from math import log', + 'Importing "log" from "math" is not allowed.', + ) + + def test_forbid_try_except(self) -> None: + self._test_forbid_syntax( + 'try:\n ...\nexcept:\n ...', + 'Try/Except blocks are not allowed.', + ) + + def test_forbid_names_blacklist(self) -> None: + forbidden_cases = { + '__builtins__': [ + r'''x = __builtins__('dir')''', + r'''y = __builtins__.dir''', + ], + '__import__': [ + r'''sys = __import__('sys')''', + r'''os = __import__('os.path')''', + r'''path = __import__('os.path', fromlist=[None])''', + ], + 'compile': [ + r'''code = compile('print("foo")')''', + ], + 'delattr': [ + '''x = dict()\nx.foo = 1\ndelattr(x, 'foo')''', + ], + 'dir': [ + '''x = dir()''', + ], + 'eval': [ + '''x = eval('1+1')''', + ], + 'exec': [ + '''exec('x=1+1')''', + ], + 
'getattr': [ + '''x = dict()\nx.foo = 1\ny = getattr(x, 'foo')''', + ], + 'globals': [ + '''x = 1\ny = globals()['x']''', + ], + 'hasattr': [ + '''x = dict()\ny = hasattr(x, 'foo')''', + ], + 'input': [ + '''x = input()''', + ], + 'locals': [ + '''x = 1\ny = locals()['x']''', + ], + 'open': [ + '''x = open('foo.txt')''', + ], + 'setattr': [ + '''x = dict()\nsetattr(x, 'foo', 1)''', + ], + 'vars': [ + '''x = vars()''', + ], + } + for attr, codes in forbidden_cases.items(): + for code in codes: + self._test_forbid_syntax(code, f'Usage or reference to {attr} is not allowed.') + + def test_forbid_internal_attr(self) -> None: + self._test_forbid_syntax( + 'x = 1\nx.__class__', + 'Access to internal attributes and methods is not allowed.', + ) + self._test_forbid_syntax( + 'x = 1\nx.__runner', + 'Access to internal attributes and methods is not allowed.', + ) + self._test_forbid_syntax( + 'x = 1\nx._Context__runner', + 'Access to internal attributes and methods is not allowed.', + ) + self._test_forbid_syntax( + 'x = log.__entries__', + 'Access to internal attributes and methods is not allowed.', + ) + + def test_forbid_async_fn(self) -> None: + self._test_forbid_syntax( + 'async def foo():\n ...', + 'Async functions are not allowed.', + ) + + def test_forbid_await_syntax(self) -> None: + # XXX: it is normally forbidden to use await outside an async context, and since async functions cannot be + # defined, it isn't possible to make a realistic code that will fail with await (also applies to other + # syntax nodes as'async for' and 'async with'), however the parser will normally accept this because it + # forms a valid syntax tree + self._test_forbid_syntax( + 'x = await foo()', + 'Await is not allowed.', + ) + self._test_forbid_syntax( + 'async for i in range(10):\n ...', + 'Async loops are not allowed.', + ) + self._test_forbid_syntax( + 'async with foo():\n ...', + 'Async contexts are not allowed.', + ) + + def test_blueprint_type_not_a_class(self) -> None: + blueprint 
= self._create_on_chain_blueprint('''__blueprint__ = "Bet"''') + with self.assertRaises(InvalidNewTransaction) as cm: + self.manager.vertex_handler.on_new_relayed_vertex(blueprint) + assert isinstance(cm.exception.__cause__, OCBInvalidScript) + assert cm.exception.args[0] == 'full validation failed: __blueprint__ is not a class' + + def test_blueprint_type_not_blueprint_subclass(self) -> None: + blueprint = self._create_on_chain_blueprint('''class Foo:\n ...\n__blueprint__ = Foo''') + with self.assertRaises(InvalidNewTransaction) as cm: + self.manager.vertex_handler.on_new_relayed_vertex(blueprint) + assert isinstance(cm.exception.__cause__, OCBInvalidScript) + assert cm.exception.args[0] == 'full validation failed: __blueprint__ is not a Blueprint subclass' + + def test_zlib_bomb(self) -> None: + from struct import error as StructError + + from hathor.nanocontracts.on_chain_blueprint import ON_CHAIN_BLUEPRINT_VERSION, CodeKind + from hathor.transaction.util import int_to_bytes + from hathor.transaction.vertex_parser import VertexParser + + blueprint = self._create_on_chain_blueprint('') + code = bytearray() + code.extend(int_to_bytes(ON_CHAIN_BLUEPRINT_VERSION, 1)) + code_type = bytes(CodeKind.PYTHON_ZLIB) + code.extend(int_to_bytes(len(ZLIB_BOMB) + len(code_type) + 1, 4)) + code.extend(code_type) + code.extend(ZLIB_BOMB) + blueprint.serialize_code = lambda: code # type: ignore[method-assign] + serialized_blueprint = bytes(blueprint) + parser = VertexParser(settings=self._settings) + with self.assertRaises(StructError) as cm: + _ = parser.deserialize(serialized_blueprint) + cause = cm.exception.__cause__ + self.assertIsInstance(cause, ValueError) + self.assertEqual(cause.args, ('Decompressed code is too long.',)) diff --git a/tests/nanocontracts/on_chain_blueprints/test_structure.py b/tests/nanocontracts/on_chain_blueprints/test_structure.py new file mode 100644 index 000000000..ee0d16752 --- /dev/null +++ 
b/tests/nanocontracts/on_chain_blueprints/test_structure.py @@ -0,0 +1,45 @@ +from hathor.conf.get_settings import get_global_settings +from hathor.nanocontracts import OnChainBlueprint +from hathor.nanocontracts.utils import load_builtin_blueprint_for_ocb + +from .. import test_blueprints +from .utils import get_ocb_private_key + +# XXX: ON_CHAIN_BET_NC_CODE is not imported from test_bet because test_bet will be refactored out +ON_CHAIN_BET_NC_CODE: str = load_builtin_blueprint_for_ocb('bet.py', 'Bet', test_blueprints) + + +def test_ocb_recompress(): + from hathor.nanocontracts.on_chain_blueprint import Code + from hathor.transaction.vertex_parser import VertexParser + + # XXX: explicitly compression level to confirm that parsing won't re-compress it, since it can't know the + # compression level when decompressing, it must keep the original and thus if it re-compressed it would not + # generate the same sequence + nc_code = ON_CHAIN_BET_NC_CODE + settings = get_global_settings() + # XXX: 3 should be more than enough to make a difference from the default (which is 9) + code = Code.from_python_code(nc_code, settings, compress_level=3) + code2 = Code.from_python_code(nc_code, settings) + # but just to make sure, we test it + assert code.data != code2.data, 'different compression level should yield different results' + ocb = OnChainBlueprint( + weight=1, + inputs=[], + outputs=[], + parents=[ + b'\x01' * 32, + b'\x02' * 32, + ], + timestamp=1234, + code=code, + ) + ocb.weight = 1.234 + ocb.sign(get_ocb_private_key()) + ocb.update_hash() + ocb_bytes = bytes(ocb) + parser = VertexParser(settings=settings) + ocb2 = parser.deserialize(ocb_bytes) + assert ocb == ocb2 + ocb_bytes2 = bytes(ocb2) + assert ocb_bytes == ocb_bytes2 diff --git a/tests/nanocontracts/test_actions.py b/tests/nanocontracts/test_actions.py new file mode 100644 index 000000000..86c46ead4 --- /dev/null +++ b/tests/nanocontracts/test_actions.py @@ -0,0 +1,926 @@ +# Copyright 2025 Hathor Labs +# +# 
Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import dataclasses +from typing import Any +from unittest.mock import patch + +import pytest + +from hathor.conf.settings import HATHOR_TOKEN_UID +from hathor.indexes.tokens_index import TokensIndex +from hathor.nanocontracts import Blueprint, Context, public +from hathor.nanocontracts.catalog import NCBlueprintCatalog +from hathor.nanocontracts.exception import NCInvalidAction +from hathor.nanocontracts.method import Method +from hathor.nanocontracts.nc_exec_logs import NCLogConfig +from hathor.nanocontracts.storage.contract_storage import Balance, BalanceKey +from hathor.nanocontracts.types import NCActionType, TokenUid +from hathor.nanocontracts.utils import sign_pycoin +from hathor.transaction import Block, Transaction, TxInput, TxOutput +from hathor.transaction.exceptions import InvalidToken +from hathor.transaction.headers import NanoHeader +from hathor.transaction.headers.nano_header import NanoHeaderAction +from hathor.util import not_none +from hathor.verification.nano_header_verifier import MAX_ACTIONS_LEN +from hathor.wallet import HDWallet +from tests import unittest +from tests.dag_builder.builder import TestDAGBuilder +from tests.nanocontracts.utils import assert_nc_failure_reason + + +class MyBlueprint(Blueprint): + @public(allow_deposit=True) + def initialize(self, ctx: Context) -> None: + pass + + @public(allow_actions=[ + NCActionType.DEPOSIT, + NCActionType.WITHDRAWAL, + NCActionType.GRANT_AUTHORITY, + 
NCActionType.ACQUIRE_AUTHORITY, + ]) + def nop(self, ctx: Context) -> None: + pass + + @public + def revoke(self, ctx: Context, token_uid: TokenUid, revoke_mint: bool, revoke_melt: bool) -> None: + self.syscall.revoke_authorities(token_uid=token_uid, revoke_mint=revoke_mint, revoke_melt=revoke_melt) + + @public(allow_deposit=True, allow_withdrawal=True, allow_grant_authority=True) + def mint(self, ctx: Context, token_uid: TokenUid, amount: int) -> None: + self.syscall.mint_tokens(token_uid, amount) + + @public(allow_deposit=True, allow_withdrawal=True) + def melt(self, ctx: Context, token_uid: TokenUid, amount: int) -> None: + self.syscall.melt_tokens(token_uid, amount) + + +class TestActions(unittest.TestCase): + """ + Tests in this file use a hybrid dag builder and manual approach. First, the dag builder is used to setup the + initial state and every vertex that we'll need. Then, we manually manipulate a tx's nano header adding the + required actions and changing inputs/outputs accordingly. + + The dag builder deos not currently support authority actions. Even when it supports them, it's good to keep those + tests manual to make basic assertions without the implicitness of the dag builder. + """ + + def setUp(self) -> None: + super().setUp() + + self.bp_id = b'1' * 32 + self.manager = self.create_peer('testnet', nc_log_config=NCLogConfig.FAILED, wallet_index=True) + self.manager.tx_storage.nc_catalog = NCBlueprintCatalog({ + self.bp_id: MyBlueprint + }) + assert self.manager.tx_storage.indexes is not None + self.tokens_index: TokensIndex = not_none(self.manager.tx_storage.indexes.tokens) + self.nc_seqnum = 0 + + self.dag_builder = TestDAGBuilder.from_manager(self.manager) + self.artifacts = self.dag_builder.build_from_str(f''' + blockchain genesis b[1..12] + + tx0.nc_id = "{self.bp_id.hex()}" + tx0.nc_method = initialize() + tx0.nc_deposit = 1000 HTR + tx0.nc_deposit = 1000 TKA + + # The fact that HTR is in index 0 and TKA is in index 1 is used by tests below. 
+ tx1.out[0] = 10000 HTR + tx1.out[1] = 1000 TKA + + tx2.out[0] = 10000 HTR + tx2.out[1] = 1000 TKA + + b10 < dummy < TKA < tx0 + tx0 <-- tx1 <-- b11 + b11 < tx2 + tx1 <-- tx2 <-- b12 + ''') + + # We only propagate up to tx0. The rest is manipulated and propagated by each test. + self.artifacts.propagate_with(self.manager, up_to='tx0') + + self.b11, self.b12 = self.artifacts.get_typed_vertices(['b11', 'b12'], Block) + self.tx0, self.tx1, self.tx2, self.tka = self.artifacts.get_typed_vertices( + ['tx0', 'tx1', 'tx2', 'TKA'], + Transaction, + ) + + # We finish a manual setup of tx1, so it can be used directly in verification methods. + self.tx1.storage = self.manager.tx_storage + self.tx1.init_static_metadata_from_storage(self._settings, self.manager.tx_storage) + + # Just some constants. + self.htr_balance_key = BalanceKey(nc_id=self.tx0.hash, token_uid=HATHOR_TOKEN_UID) + self.tka_balance_key = BalanceKey(nc_id=self.tx0.hash, token_uid=self.tka.hash) + + # Initial state sanity check. 30 HTR are used to mint 3000 TKA. + self.initial_htr_total = self._settings.GENESIS_TOKENS + 10 * self._settings.INITIAL_TOKENS_PER_BLOCK - 30 + self.initial_tka_total = 3000 + self._assert_token_index(htr_total=self.initial_htr_total, tka_total=self.initial_tka_total) + + def _set_nano_header( + self, + *, + tx: Transaction, + nc_actions: list[NanoHeaderAction] | None = None, + nc_method: str | None = None, + nc_args: tuple[Any, ...] 
| None = None, + ) -> None: + """Configure a nano header for a tx.""" + assert len(tx.headers) == 0 + wallet = self.dag_builder._exporter._wallets['main'] + assert isinstance(wallet, HDWallet) + privkey = wallet.get_key_at_index(0) + + nc_args_bytes = b'\x00' + if nc_args is not None: + assert nc_method is not None + method_parser = Method.from_callable(getattr(MyBlueprint, nc_method)) + nc_args_bytes = method_parser.serialize_args_bytes(nc_args) + + nano_header = NanoHeader( + tx=tx, + nc_seqnum=self.nc_seqnum, + nc_id=self.tx0.hash, + nc_method=nc_method if nc_method is not None else 'nop', + nc_args_bytes=nc_args_bytes, + nc_address=b'', + nc_script=b'', + nc_actions=nc_actions if nc_actions is not None else [], + ) + self.nc_seqnum += 1 + + sign_pycoin(nano_header, privkey) + tx.headers.append(nano_header) + + def _change_tx_balance( + self, + *, + tx: Transaction, + update_htr_output: int | None = None, + update_tka_output: int | None = None, + add_inputs: list[TxInput] | None = None, + add_outputs: list[TxOutput] | None = None, + ) -> None: + """ + Modify a tx by optionally changing its HTR and TKA output values, or adding new inputs and outputs, + then re-sign all input scripts. 
+ """ + if update_htr_output is not None: + out = tx.outputs[0] + assert tx.get_token_uid(out.get_token_index()) == HATHOR_TOKEN_UID, ( + 'expected HTR in output index 0' + ) + out.value += update_htr_output + + if update_tka_output is not None: + out = tx.outputs[1] + assert tx.get_token_uid(out.get_token_index()) == self.tka.hash, ( + 'expected TKA in output index 1' + ) + out.value += update_tka_output + + if add_inputs: + tx.inputs.extend(add_inputs) + + if add_outputs: + tx.outputs.extend(add_outputs) + + self.dag_builder._exporter.sign_all_inputs(tx) + + def _get_all_balances(self) -> dict[BalanceKey, Balance]: + return self.manager.get_best_block_nc_storage(self.tx0.hash).get_all_balances() + + def _create_tka_mint_input(self) -> TxInput: + """Return a new TxInput pointing to a TKA mint authority.""" + mint_index = len(self.tka.outputs) - 2 + mint_output: TxOutput = self.tka.outputs[mint_index] + token_uid = self.tka.get_token_uid(mint_output.get_token_index()) + assert token_uid == self.tka.hash and mint_output.can_mint_token(), ( + f'expected the dag builder to generate a mint authority in output index {mint_index}' + ) + return TxInput(tx_id=self.tka.hash, index=mint_index, data=b'') + + def _create_tka_melt_input(self) -> TxInput: + """Return a new TxInput pointing to a TKA melt authority.""" + melt_index = len(self.tka.outputs) - 1 + melt_output: TxOutput = self.tka.outputs[melt_index] + token_uid = self.tka.get_token_uid(melt_output.get_token_index()) + assert token_uid == self.tka.hash and melt_output.can_melt_token(), ( + f'expected the dag builder to generate a melt authority in output index {melt_index}' + ) + return TxInput(tx_id=self.tka.hash, index=melt_index, data=b'') + + def _assert_token_index(self, *, htr_total: int, tka_total: int) -> None: + assert self.tokens_index.get_token_info(HATHOR_TOKEN_UID).get_total() == htr_total + assert self.tokens_index.get_token_info(self.tka.hash).get_total() == tka_total + + def test_deposit_success(self) 
-> None: + # Add a DEPOSIT action and remove tokens from the HTR output accordingly. + self._change_tx_balance(tx=self.tx1, update_htr_output=-123) + self._set_nano_header(tx=self.tx1, nc_actions=[ + NanoHeaderAction(type=NCActionType.DEPOSIT, token_index=0, amount=123), + ]) + + # Execute tx1 + self.artifacts.propagate_with(self.manager, up_to='b11') + assert self.b11.get_metadata().voided_by is None + assert self.tx1.get_metadata().voided_by is None + assert self.tx1.get_metadata().first_block == self.b11.hash + + # Check that the nano contract balance is updated with the added tokens. + assert self._get_all_balances() == { + self.htr_balance_key: Balance(value=1123, can_mint=False, can_melt=False), + self.tka_balance_key: Balance(value=1000, can_mint=False, can_melt=False), + } + + # Check the token index. + self._assert_token_index( + htr_total=self.initial_htr_total + self._settings.INITIAL_TOKENS_PER_BLOCK, + tka_total=self.initial_tka_total, + ) + + def test_withdrawal_success(self) -> None: + # Add a WITHDRAWAL action and add tokens to the HTR output accordingly. + self._change_tx_balance(tx=self.tx1, update_htr_output=123) + self._set_nano_header(tx=self.tx1, nc_actions=[ + NanoHeaderAction(type=NCActionType.WITHDRAWAL, token_index=0, amount=123), + ]) + + # Execute tx1 + self.artifacts.propagate_with(self.manager, up_to='b11') + assert self.b11.get_metadata().voided_by is None + assert self.tx1.get_metadata().voided_by is None + assert self.tx1.get_metadata().first_block == self.b11.hash + + # Check that the nano contract balance is updated with the removed tokens. + assert self._get_all_balances() == { + self.htr_balance_key: Balance(value=877, can_mint=False, can_melt=False), + self.tka_balance_key: Balance(value=1000, can_mint=False, can_melt=False), + } + + # Check the token index. 
+ self._assert_token_index( + htr_total=self.initial_htr_total + self._settings.INITIAL_TOKENS_PER_BLOCK, + tka_total=self.initial_tka_total, + ) + + def test_grant_authority_mint_success(self) -> None: + # Add a GRANT_AUTHORITY action to mint TKA, and add a mint authority input accordingly. + self._change_tx_balance(tx=self.tx1, add_inputs=[self._create_tka_mint_input()]) + self._set_nano_header(tx=self.tx1, nc_actions=[ + NanoHeaderAction( + type=NCActionType.GRANT_AUTHORITY, token_index=1, amount=TxOutput.TOKEN_MINT_MASK + ), + ]) + + # Execute tx1 + self.artifacts.propagate_with(self.manager, up_to='b11') + assert self.b11.get_metadata().voided_by is None + assert self.tx1.get_metadata().voided_by is None + assert self.tx1.get_metadata().first_block == self.b11.hash + + # Check that the nano contract balance is updated with the mint authority. + assert self._get_all_balances() == { + self.htr_balance_key: Balance(value=1000, can_mint=False, can_melt=False), + self.tka_balance_key: Balance(value=1000, can_mint=True, can_melt=False), + } + + def test_grant_authority_melt_success(self) -> None: + # Add a GRANT_AUTHORITY action to melt TKA, and add a melt authority input accordingly. + self._change_tx_balance(tx=self.tx1, add_inputs=[self._create_tka_melt_input()]) + self._set_nano_header(tx=self.tx1, nc_actions=[ + NanoHeaderAction( + type=NCActionType.GRANT_AUTHORITY, token_index=1, amount=TxOutput.TOKEN_MELT_MASK + ), + ]) + + # Execute tx1 + self.artifacts.propagate_with(self.manager, up_to='b11') + assert self.b11.get_metadata().voided_by is None + assert self.tx1.get_metadata().voided_by is None + assert self.tx1.get_metadata().first_block == self.b11.hash + + # Check that the nano contract balance is updated with the melt authority. 
+ assert self._get_all_balances() == { + self.htr_balance_key: Balance(value=1000, can_mint=False, can_melt=False), + self.tka_balance_key: Balance(value=1000, can_mint=False, can_melt=True), + } + + def test_grant_authority_all_success(self) -> None: + # Add a GRANT_AUTHORITY action to both mint and melt TKA, and add authority inputs accordingly. + self._change_tx_balance( + tx=self.tx1, + add_inputs=[ + self._create_tka_mint_input(), + self._create_tka_melt_input(), + ] + ) + self._set_nano_header(tx=self.tx1, nc_actions=[ + NanoHeaderAction( + type=NCActionType.GRANT_AUTHORITY, token_index=1, amount=TxOutput.ALL_AUTHORITIES + ), + ]) + + # Execute tx1 + self.artifacts.propagate_with(self.manager, up_to='b11') + assert self.b11.get_metadata().voided_by is None + assert self.tx1.get_metadata().voided_by is None + assert self.tx1.get_metadata().first_block == self.b11.hash + + # Check that the nano contract balance is updated with both mint and melt authorities. + assert self._get_all_balances() == { + self.htr_balance_key: Balance(value=1000, can_mint=False, can_melt=False), + self.tka_balance_key: Balance(value=1000, can_mint=True, can_melt=True), + } + + def _test_acquire_authority_to_create_output(self, authority: int) -> None: + token_index = 1 + + # Add an ACQUIRE_AUTHORITY action for TKA, and add a new authority output accordingly, + # both with the provided `authority`. 
+ self._change_tx_balance( + tx=self.tx2, + add_outputs=[ + TxOutput(value=authority, script=b'', token_data=TxOutput.TOKEN_AUTHORITY_MASK | token_index) + ] + ) + self._set_nano_header(tx=self.tx2, nc_actions=[ + NanoHeaderAction( + type=NCActionType.ACQUIRE_AUTHORITY, token_index=1, amount=authority + ), + ]) + + # Execute tx2 + self.artifacts.propagate_with(self.manager, up_to='b12') + assert self.b12.get_metadata().voided_by is None + assert self.tx2.get_metadata().first_block == self.b12.hash + + def test_acquire_authority_create_mint_success(self) -> None: + # Grant a mint authority to the nano contract and use it to create a new mint authority output. + self.test_grant_authority_mint_success() + self._test_acquire_authority_to_create_output(TxOutput.TOKEN_MINT_MASK) + + # Check that tx2 successfully executes. + assert self.tx2.get_metadata().voided_by is None + + def test_acquire_authority_create_mint_nc_fail(self) -> None: + # Try to create a new mint authority output, but the contract doesn't have that authority. + self._test_acquire_authority_to_create_output(TxOutput.TOKEN_MINT_MASK) + + # Check that tx2 fails execution. + assert self.tx2.get_metadata().voided_by == {self.tx2.hash, self._settings.NC_EXECUTION_FAIL_ID} + assert_nc_failure_reason( + manager=self.manager, + tx_id=self.tx2.hash, + block_id=self.b12.hash, + reason=f'NCInvalidAction: cannot acquire mint authority for token {self.tka.hash_hex}' + ) + + def test_acquire_authority_create_melt_success(self) -> None: + # Grant a melt authority to the nano contract and use it to create a new melt authority output. + self.test_grant_authority_melt_success() + self._test_acquire_authority_to_create_output(TxOutput.TOKEN_MELT_MASK) + + # Check that tx2 successfully executes. + assert self.tx2.get_metadata().voided_by is None + + def test_acquire_authority_create_melt_nc_fail(self) -> None: + # Try to create a new melt authority output, but the contract doesn't have that authority. 
+ self._test_acquire_authority_to_create_output(TxOutput.TOKEN_MELT_MASK) + + # Check that tx2 fails execution. + assert self.tx2.get_metadata().voided_by == {self.tx2.hash, self._settings.NC_EXECUTION_FAIL_ID} + assert_nc_failure_reason( + manager=self.manager, + tx_id=self.tx2.hash, + block_id=self.b12.hash, + reason=f'NCInvalidAction: cannot acquire melt authority for token {self.tka.hash_hex}' + ) + + def test_acquire_authority_create_all_success(self) -> None: + # Grant all authorities to the nano contract and use it to create a new all authorities output. + self.test_grant_authority_all_success() + self._test_acquire_authority_to_create_output(TxOutput.ALL_AUTHORITIES) + + # Check that tx2 successfully executes. + assert self.tx2.get_metadata().voided_by is None + + def test_acquire_authority_create_all_nc_fail(self) -> None: + # Try to create a new all authorities output, but the contract doesn't have any authorities. + self._test_acquire_authority_to_create_output(TxOutput.ALL_AUTHORITIES) + + # Check that tx2 fails execution. + assert self.tx2.get_metadata().voided_by == {self.tx2.hash, self._settings.NC_EXECUTION_FAIL_ID} + assert_nc_failure_reason( + manager=self.manager, + tx_id=self.tx2.hash, + block_id=self.b12.hash, + reason=f'NCInvalidAction: cannot acquire mint authority for token {self.tka.hash_hex}' + ) + + def test_acquire_authority_mint_tokens_success(self) -> None: + # Grant a mint authority to the nano contract and use it to mint tokens. + self.test_grant_authority_mint_success() + + # Add an ACQUIRE_AUTHORITY action for TKA, minting new TKA, and updating the HTR balance accordingly. 
+ self._change_tx_balance( + tx=self.tx2, + update_htr_output=-10, + update_tka_output=1000, + ) + self._set_nano_header(tx=self.tx2, nc_actions=[ + NanoHeaderAction( + type=NCActionType.ACQUIRE_AUTHORITY, token_index=1, amount=TxOutput.TOKEN_MINT_MASK + ), + ]) + + # Execute tx2 + self.artifacts.propagate_with(self.manager, up_to='b12') + assert self.b12.get_metadata().voided_by is None + assert self.tx2.get_metadata().first_block == self.b12.hash + + # Check that tx2 successfully executes. + assert self.tx2.get_metadata().voided_by is None + + def test_acquire_authority_melt_tokens_success(self) -> None: + # Grant a melt authority to the nano contract and use it to melt tokens. + self.test_grant_authority_melt_success() + + # Add an ACQUIRE_AUTHORITY action for TKA, melting TKA, and updating the HTR balance accordingly. + self._change_tx_balance( + tx=self.tx2, + update_htr_output=5, + update_tka_output=-500, + ) + self._set_nano_header(tx=self.tx2, nc_actions=[ + NanoHeaderAction( + type=NCActionType.ACQUIRE_AUTHORITY, token_index=1, amount=TxOutput.TOKEN_MELT_MASK + ), + ]) + + # Execute tx2 + self.artifacts.propagate_with(self.manager, up_to='b12') + assert self.b12.get_metadata().voided_by is None + assert self.tx2.get_metadata().first_block == self.b12.hash + + # Check that tx2 successfully executes. + assert self.tx2.get_metadata().voided_by is None + + def test_mint_tokens_success(self) -> None: + # Grant a TKA mint authority to the nano contract and then use it to mint tokens. + self.test_grant_authority_mint_success() + assert self._get_all_balances() == { + self.htr_balance_key: Balance(value=1000, can_mint=False, can_melt=False), + self.tka_balance_key: Balance(value=1000, can_mint=True, can_melt=False), + } + + # Add actions so both minted tokens and htr used to mint tokens are in/from the tx outputs/inputs. 
+ self._change_tx_balance(tx=self.tx2, update_htr_output=-200, update_tka_output=20000) + self._set_nano_header( + tx=self.tx2, + nc_actions=[ + NanoHeaderAction(type=NCActionType.WITHDRAWAL, token_index=1, amount=20000), + NanoHeaderAction(type=NCActionType.DEPOSIT, token_index=0, amount=200), + ], + nc_method='mint', + nc_args=(self.tka.hash, 20000), + ) + + # Execute tx2 + self.artifacts.propagate_with(self.manager, up_to='b12') + assert self.b12.get_metadata().voided_by is None + assert self.tx2.get_metadata().first_block == self.b12.hash + assert self.tx2.get_metadata().voided_by is None + + # Check that the nano contract balance is unchanged because both + # minted tokens and HTR used to mint in/were from tx outputs/inputs. + assert self._get_all_balances() == { + self.htr_balance_key: Balance(value=1000, can_mint=False, can_melt=False), + self.tka_balance_key: Balance(value=1000, can_mint=True, can_melt=False), + } + + # Check the token index. + self._assert_token_index( + htr_total=self.initial_htr_total + 2 * self._settings.INITIAL_TOKENS_PER_BLOCK - 200, + tka_total=self.initial_tka_total + 20000, + ) + + def test_grant_and_mint_same_tx_success(self) -> None: + # Add a GRANT_AUTHORITY action to mint TKA, and add a mint authority input accordingly. + # Also add a call to mint + self._change_tx_balance(tx=self.tx1, add_inputs=[self._create_tka_mint_input()]) + self._set_nano_header( + tx=self.tx1, + nc_actions=[ + NanoHeaderAction(type=NCActionType.GRANT_AUTHORITY, token_index=1, amount=TxOutput.TOKEN_MINT_MASK), + ], + nc_method='mint', + nc_args=(self.tka.hash, 200) + ) + + # Execute tx1 + self.artifacts.propagate_with(self.manager, up_to='b11') + assert self.b11.get_metadata().voided_by is None + assert self.tx1.get_metadata().voided_by is None + assert self.tx1.get_metadata().first_block == self.b11.hash + + # Check that the nano contract balance is updated with the mint authority. 
+ assert self._get_all_balances() == { + self.htr_balance_key: Balance(value=998, can_mint=False, can_melt=False), + self.tka_balance_key: Balance(value=1200, can_mint=True, can_melt=False), + } + + def test_mint_tokens_keep_in_contract_success(self) -> None: + # Grant a TKA mint authority to the nano contract and then use it to mint tokens. + self.test_grant_authority_mint_success() + assert self._get_all_balances() == { + self.htr_balance_key: Balance(value=1000, can_mint=False, can_melt=False), + self.tka_balance_key: Balance(value=1000, can_mint=True, can_melt=False), + } + + # Add a deposit action, paying for HTR with the input and keeping the minted token in the contract. + self._change_tx_balance(tx=self.tx2, update_htr_output=-200) + self._set_nano_header( + tx=self.tx2, + nc_actions=[NanoHeaderAction(type=NCActionType.DEPOSIT, token_index=0, amount=200)], + nc_method='mint', + nc_args=(self.tka.hash, 20000) + ) + + # Execute tx2 + self.artifacts.propagate_with(self.manager, up_to='b12') + assert self.b12.get_metadata().voided_by is None + assert self.tx2.get_metadata().first_block == self.b12.hash + assert self.tx2.get_metadata().voided_by is None + + # Check that the nano contract balance is updated. + assert self._get_all_balances() == { + self.htr_balance_key: Balance(value=1000, can_mint=False, can_melt=False), + self.tka_balance_key: Balance(value=21000, can_mint=True, can_melt=False), + } + + # Check the token index. + self._assert_token_index( + htr_total=self.initial_htr_total + 2 * self._settings.INITIAL_TOKENS_PER_BLOCK - 200, + tka_total=self.initial_tka_total + 20000, + ) + + def test_mint_tokens_and_partial_withdrawal_success(self) -> None: + # Grant a TKA mint authority to the nano contract and then use it to mint tokens. 
+ self.test_grant_authority_mint_success() + assert self._get_all_balances() == { + self.htr_balance_key: Balance(value=1000, can_mint=False, can_melt=False), + self.tka_balance_key: Balance(value=1000, can_mint=True, can_melt=False), + } + + # Add actions paying for HTR with the input and withdrawing part of the minted token from the contract. + self._change_tx_balance(tx=self.tx2, update_htr_output=-200, update_tka_output=10000) + self._set_nano_header( + tx=self.tx2, + nc_actions=[ + NanoHeaderAction(type=NCActionType.WITHDRAWAL, token_index=1, amount=10000), + NanoHeaderAction(type=NCActionType.DEPOSIT, token_index=0, amount=200), + ], + nc_method='mint', + nc_args=(self.tka.hash, 20000) + ) + + # Execute tx2 + self.artifacts.propagate_with(self.manager, up_to='b12') + assert self.b12.get_metadata().voided_by is None + assert self.tx2.get_metadata().first_block == self.b12.hash + assert self.tx2.get_metadata().voided_by is None + + # Check that the nano contract balance is updated. + assert self._get_all_balances() == { + self.htr_balance_key: Balance(value=1000, can_mint=False, can_melt=False), + self.tka_balance_key: Balance(value=11000, can_mint=True, can_melt=False), + } + + # Check the token index. + self._assert_token_index( + htr_total=self.initial_htr_total + 2 * self._settings.INITIAL_TOKENS_PER_BLOCK - 200, + tka_total=self.initial_tka_total + 20000, + ) + + def test_melt_tokens_success(self) -> None: + # Grant a TKA melt authority to the nano contract and then use it to melt tokens. + self.test_grant_authority_melt_success() + assert self._get_all_balances() == { + self.htr_balance_key: Balance(value=1000, can_mint=False, can_melt=False), + self.tka_balance_key: Balance(value=1000, can_mint=False, can_melt=True), + } + + # Add actions so both melted tokens and htr received from melt are from/in the tx inputs/outputs. 
+ self._change_tx_balance(tx=self.tx2, update_htr_output=5, update_tka_output=-500) + self._set_nano_header( + tx=self.tx2, + nc_actions=[ + NanoHeaderAction(type=NCActionType.DEPOSIT, token_index=1, amount=500), + NanoHeaderAction(type=NCActionType.WITHDRAWAL, token_index=0, amount=5), + ], + nc_method='melt', + nc_args=(self.tka.hash, 500) + ) + + # Execute tx2 + self.artifacts.propagate_with(self.manager, up_to='b12') + assert self.b12.get_metadata().voided_by is None + assert self.tx2.get_metadata().first_block == self.b12.hash + assert self.tx2.get_metadata().voided_by is None + + # Check that the nano contract balance is unchanged because both + # melted tokens and HTR received are from/in the tx inputs/outputs. + assert self._get_all_balances() == { + self.htr_balance_key: Balance(value=1000, can_mint=False, can_melt=False), + self.tka_balance_key: Balance(value=1000, can_mint=False, can_melt=True), + } + + # Check the token index. + self._assert_token_index( + htr_total=self.initial_htr_total + 2 * self._settings.INITIAL_TOKENS_PER_BLOCK + 5, + tka_total=self.initial_tka_total - 500, + ) + + def test_melt_tokens_from_contract_success(self) -> None: + # Grant a TKA melt authority to the nano contract and then use it to melt tokens. + self.test_grant_authority_melt_success() + assert self._get_all_balances() == { + self.htr_balance_key: Balance(value=1000, can_mint=False, can_melt=False), + self.tka_balance_key: Balance(value=1000, can_mint=False, can_melt=True), + } + + # Add a withdrawal action receiving the HTR from the melt in the output and melting the tokens in the contract. 
+ self._change_tx_balance(tx=self.tx2, update_htr_output=5) + self._set_nano_header( + tx=self.tx2, + nc_actions=[NanoHeaderAction(type=NCActionType.WITHDRAWAL, token_index=0, amount=5)], + nc_method='melt', + nc_args=(self.tka.hash, 500) + ) + + # Execute tx2 + self.artifacts.propagate_with(self.manager, up_to='b12') + assert self.b12.get_metadata().voided_by is None + assert self.tx2.get_metadata().first_block == self.b12.hash + assert self.tx2.get_metadata().voided_by is None + + # Check that the nano contract balance is updated. + assert self._get_all_balances() == { + self.htr_balance_key: Balance(value=1000, can_mint=False, can_melt=False), + self.tka_balance_key: Balance(value=500, can_mint=False, can_melt=True), + } + + # Check the token index. + self._assert_token_index( + htr_total=self.initial_htr_total + 2 * self._settings.INITIAL_TOKENS_PER_BLOCK + 5, + tka_total=self.initial_tka_total - 500, + ) + + def test_melt_tokens_from_contract_and_input_success(self) -> None: + # Grant a TKA melt authority to the nano contract and then use it to melt tokens. + self.test_grant_authority_melt_success() + assert self._get_all_balances() == { + self.htr_balance_key: Balance(value=1000, can_mint=False, can_melt=False), + self.tka_balance_key: Balance(value=1000, can_mint=False, can_melt=True), + } + + # Add actions so part of the tokens are melted from inputs and part from the contract. 
+ self._change_tx_balance(tx=self.tx2, update_htr_output=5, update_tka_output=-250) + self._set_nano_header( + tx=self.tx2, + nc_actions=[ + NanoHeaderAction(type=NCActionType.DEPOSIT, token_index=1, amount=250), + NanoHeaderAction(type=NCActionType.WITHDRAWAL, token_index=0, amount=5), + ], + nc_method='melt', + nc_args=(self.tka.hash, 500) + ) + + # Execute tx2 + self.artifacts.propagate_with(self.manager, up_to='b12') + assert self.b12.get_metadata().voided_by is None + assert self.tx2.get_metadata().first_block == self.b12.hash + assert self.tx2.get_metadata().voided_by is None + + # Check that the nano contract balance is updated. + assert self._get_all_balances() == { + self.htr_balance_key: Balance(value=1000, can_mint=False, can_melt=False), + self.tka_balance_key: Balance(value=750, can_mint=False, can_melt=True), + } + + # Check the token index. + self._assert_token_index( + htr_total=self.initial_htr_total + 2 * self._settings.INITIAL_TOKENS_PER_BLOCK + 5, + tka_total=self.initial_tka_total - 500, + ) + + def test_acquire_and_grant_same_token_not_allowed(self) -> None: + self._set_nano_header( + tx=self.tx1, + nc_actions=[ + NanoHeaderAction(type=NCActionType.ACQUIRE_AUTHORITY, token_index=1, amount=TxOutput.TOKEN_MINT_MASK), + NanoHeaderAction(type=NCActionType.GRANT_AUTHORITY, token_index=1, amount=TxOutput.TOKEN_MINT_MASK), + ], + ) + + with pytest.raises(NCInvalidAction) as e: + self.manager.verification_service.verifiers.nano_header.verify_actions(self.tx1) + assert str(e.value) == f'conflicting actions for token {self.tka.hash_hex}' + + def test_grant_and_acquire_same_token_not_allowed(self) -> None: + self._set_nano_header( + tx=self.tx1, + nc_actions=[ + NanoHeaderAction(type=NCActionType.GRANT_AUTHORITY, token_index=1, amount=TxOutput.TOKEN_MINT_MASK), + NanoHeaderAction(type=NCActionType.ACQUIRE_AUTHORITY, token_index=1, amount=TxOutput.TOKEN_MINT_MASK), + ], + ) + + with pytest.raises(NCInvalidAction) as e: + 
self.manager.verification_service.verifiers.nano_header.verify_actions(self.tx1) + assert str(e.value) == f'conflicting actions for token {self.tka.hash_hex}' + + def test_conflicting_actions(self) -> None: + # Add 2 conflicting actions for the same token. + self._set_nano_header(tx=self.tx1, nc_actions=[ + NanoHeaderAction(type=NCActionType.DEPOSIT, token_index=0, amount=1), + NanoHeaderAction(type=NCActionType.WITHDRAWAL, token_index=0, amount=2), + ]) + + with pytest.raises(NCInvalidAction) as e: + self.manager.verification_service.verifiers.nano_header.verify_actions(self.tx1) + assert str(e.value) == 'conflicting actions for token 00' + + def test_non_conflicting_actions_success(self) -> None: + # Add a GRANT_AUTHORITY action to mint TKA, and add a mint authority input accordingly. + # Also add a DEPOSIT action with the same token and update the tx output accordingly. + self._change_tx_balance(tx=self.tx1, add_inputs=[self._create_tka_mint_input()]) + self._change_tx_balance(tx=self.tx1, update_tka_output=-100) + self._set_nano_header( + tx=self.tx1, + nc_actions=[ + NanoHeaderAction(type=NCActionType.GRANT_AUTHORITY, token_index=1, amount=TxOutput.TOKEN_MINT_MASK), + NanoHeaderAction(type=NCActionType.DEPOSIT, token_index=1, amount=100), + ], + ) + + # Execute tx1 + self.artifacts.propagate_with(self.manager, up_to='b11') + assert self.b11.get_metadata().voided_by is None + assert self.tx1.get_metadata().voided_by is None + assert self.tx1.get_metadata().first_block == self.b11.hash + + # Check that the nano contract balance is updated with the mint authority. + assert self._get_all_balances() == { + self.htr_balance_key: Balance(value=1000, can_mint=False, can_melt=False), + self.tka_balance_key: Balance(value=1100, can_mint=True, can_melt=False), + } + + def test_token_index_not_found(self) -> None: + # Add an action with a token index out of bounds. 
+ self._set_nano_header(tx=self.tx1, nc_actions=[ + NanoHeaderAction(type=NCActionType.DEPOSIT, token_index=2, amount=1), + ]) + + with pytest.raises(NCInvalidAction) as e: + self.manager.verification_service.verify(self.tx1) + assert str(e.value) == 'DEPOSIT token index 2 not found' + + def test_token_uid_not_in_list(self) -> None: + self._set_nano_header(tx=self.tx1, nc_actions=[ + NanoHeaderAction(type=NCActionType.DEPOSIT, token_index=0, amount=1), + ]) + + nano_header = self.tx1.get_nano_header() + actions = nano_header.get_actions() + + # Here I have to fake and patch get_actions() with an invalid + # one because the nano header always creates valid token uids. + fake_token_uid = b'\1' * 32 + fake_actions = [dataclasses.replace(actions[0], token_uid=TokenUid(fake_token_uid))] + + with patch('hathor.transaction.headers.NanoHeader.get_actions', lambda _: fake_actions): + with pytest.raises(NCInvalidAction) as e: + self.manager.verification_service.verifiers.nano_header.verify_actions(self.tx1) + assert str(e.value) == f'DEPOSIT action requires token {fake_token_uid.hex()} in tokens list' + + def _test_invalid_unknown_authority(self, action_type: NCActionType) -> None: + # Create an authority action with an unknown authority. + self._set_nano_header(tx=self.tx1, nc_actions=[ + NanoHeaderAction(type=action_type, token_index=1, amount=TxOutput.ALL_AUTHORITIES + 1), + ]) + + with pytest.raises(NCInvalidAction) as e: + self.manager.verification_service.verify(self.tx1) + assert str(e.value) == f'action {action_type.name} token {self.tka.hash_hex} invalid authorities: 0b100' + + def _test_invalid_htr_authority(self, action_type: NCActionType) -> None: + # Create an authority action for HTR. 
+ self._set_nano_header(tx=self.tx1, nc_actions=[ + NanoHeaderAction(type=action_type, token_index=0, amount=TxOutput.TOKEN_MINT_MASK), + ]) + + with pytest.raises(NCInvalidAction) as e: + self.manager.verification_service.verify(self.tx1) + assert str(e.value) == f'{action_type.name} action cannot be executed on HTR token' + + def test_invalid_grant_unknown_authority(self) -> None: + self._test_invalid_unknown_authority(NCActionType.GRANT_AUTHORITY) + + def test_invalid_acquire_unknown_authority(self) -> None: + self._test_invalid_unknown_authority(NCActionType.ACQUIRE_AUTHORITY) + + def test_invalid_grant_htr_authority(self) -> None: + self._test_invalid_htr_authority(NCActionType.GRANT_AUTHORITY) + + def test_invalid_acquire_htr_authority(self) -> None: + self._test_invalid_htr_authority(NCActionType.ACQUIRE_AUTHORITY) + + def test_grant_authority_cannot_mint(self) -> None: + # Try to grant a TKA mint authority without an authority input. + self._set_nano_header(tx=self.tx1, nc_actions=[ + NanoHeaderAction( + type=NCActionType.GRANT_AUTHORITY, + token_index=1, + amount=TxOutput.TOKEN_MINT_MASK + ), + ]) + + with pytest.raises(NCInvalidAction) as e: + self.manager.verification_service.verify(self.tx1) + assert str(e.value) == f'GRANT_AUTHORITY token {self.tka.hash_hex} requires mint, but no input has it' + + def test_grant_authority_cannot_melt(self) -> None: + # Try to grant a TKA melt authority without an authority input. + self._set_nano_header(tx=self.tx1, nc_actions=[ + NanoHeaderAction( + type=NCActionType.GRANT_AUTHORITY, + token_index=1, + amount=TxOutput.TOKEN_MELT_MASK + ), + ]) + + with pytest.raises(NCInvalidAction) as e: + self.manager.verification_service.verify(self.tx1) + assert str(e.value) == f'GRANT_AUTHORITY token {self.tka.hash_hex} requires melt, but no input has it' + + def test_acquire_authority_cannot_mint_with_melt(self) -> None: + # Try to create a mint authority output with an action to acquire a melt authority. 
+ self._change_tx_balance( + tx=self.tx1, + add_outputs=[ + TxOutput(value=TxOutput.TOKEN_MINT_MASK, script=b'', token_data=TxOutput.TOKEN_AUTHORITY_MASK | 1) + ] + ) + self._set_nano_header(tx=self.tx1, nc_actions=[ + NanoHeaderAction( + type=NCActionType.ACQUIRE_AUTHORITY, token_index=1, amount=TxOutput.TOKEN_MELT_MASK + ), + ]) + + with pytest.raises(InvalidToken, match='output at index 2 has mint authority, but no input has it'): + self.manager.verification_service.verify(self.tx1) + + def test_use_authority_cannot_melt_with_mint(self) -> None: + # Try to create a melt authority output with an action to acquire a mint authority. + self._change_tx_balance( + tx=self.tx1, + add_outputs=[ + TxOutput(value=TxOutput.TOKEN_MELT_MASK, script=b'', token_data=TxOutput.TOKEN_AUTHORITY_MASK | 1) + ] + ) + self._set_nano_header(tx=self.tx1, nc_actions=[ + NanoHeaderAction( + type=NCActionType.ACQUIRE_AUTHORITY, token_index=1, amount=TxOutput.TOKEN_MINT_MASK + ), + ]) + + with pytest.raises(InvalidToken, match='output at index 2 has melt authority, but no input has it'): + self.manager.verification_service.verify(self.tx1) + + def test_actions_max_len_fail(self) -> None: + # Try to create too many actions. + action = NanoHeaderAction(type=NCActionType.ACQUIRE_AUTHORITY, token_index=1, amount=1) + actions = [action] * (MAX_ACTIONS_LEN + 1) + + self._set_nano_header(tx=self.tx1, nc_actions=actions) + + with pytest.raises(NCInvalidAction, match='more actions than the max allowed: 17 > 16'): + self.manager.verification_service.verify(self.tx1) diff --git a/tests/nanocontracts/test_allowed_actions.py b/tests/nanocontracts/test_allowed_actions.py new file mode 100644 index 000000000..7edadb9c5 --- /dev/null +++ b/tests/nanocontracts/test_allowed_actions.py @@ -0,0 +1,152 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import re + +import pytest + +from hathor.nanocontracts import Blueprint, Context, public +from hathor.nanocontracts.exception import BlueprintSyntaxError, NCForbiddenAction +from hathor.nanocontracts.runner.types import NCArgs +from hathor.nanocontracts.types import ( + NCAcquireAuthorityAction, + NCAction, + NCActionType, + NCDepositAction, + NCGrantAuthorityAction, + NCWithdrawalAction, + fallback, +) +from tests.nanocontracts.blueprints.unittest import BlueprintTestCase + + +class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @public + def nop(self, ctx: Context) -> None: + pass + + @public(allow_deposit=True) + def deposit(self, ctx: Context) -> None: + pass + + @public(allow_withdrawal=True) + def withdrawal(self, ctx: Context) -> None: + pass + + @public(allow_grant_authority=True) + def grant_authority(self, ctx: Context) -> None: + pass + + @public(allow_acquire_authority=True) + def acquire_authority(self, ctx: Context) -> None: + pass + + @fallback + def fallback(self, ctx: Context, method_name: str, nc_args: NCArgs) -> None: + pass + + +class TestAllowedActions(BlueprintTestCase): + def setUp(self) -> None: + super().setUp() + + self.blueprint_id = self.gen_random_blueprint_id() + self.contract_id = self.gen_random_contract_id() + self.register_blueprint_class(self.blueprint_id, MyBlueprint) + + self.token_a = self.gen_random_token_uid() + self.address = self.gen_random_address() + self.tx = self.get_genesis_tx() + + self.all_actions: set[NCAction] = { + NCDepositAction(token_uid=self.token_a, 
amount=123), + NCWithdrawalAction(token_uid=self.token_a, amount=123), + NCGrantAuthorityAction(token_uid=self.token_a, mint=True, melt=True), + NCAcquireAuthorityAction(token_uid=self.token_a, mint=True, melt=True), + } + + all_actions_types = [action.type for action in self.all_actions] + for action_type in NCActionType: + # To make sure we remember to test new action types when we implement them + assert action_type in all_actions_types, f'missing {action_type.name}' + + def _get_context(self, *actions: NCAction) -> Context: + return Context( + actions=list(actions), + vertex=self.tx, + address=self.address, + timestamp=self.now, + ) + + def test_no_actions_allowed(self) -> None: + self.runner.create_contract(self.contract_id, self.blueprint_id, self._get_context()) + for action in self.all_actions: + ctx = self._get_context(action) + + # Test on public method + with pytest.raises(NCForbiddenAction, match=f'action {action.name} is forbidden on method `nop`'): + self.runner.call_public_method(self.contract_id, 'nop', ctx) + + # Test on fallback method + with pytest.raises(NCForbiddenAction, match=f'action {action.name} is forbidden on method `fallback`'): + self.runner.call_public_method(self.contract_id, 'unknown', ctx) + + def test_conflicting_params(self) -> None: + msg = 'use only one of `allow_actions` or per-action flags: `initialize()`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class InvalidBlueprint(Blueprint): + @public(allow_deposit=True, allow_actions=[NCActionType.DEPOSIT]) + def initialize(self, ctx: Context) -> None: + pass + + def test_allow_specific_action_on_public(self) -> None: + for allowed_action in self.all_actions: + runner = self.build_runner() + runner.create_contract(self.contract_id, self.blueprint_id, self._get_context()) + method_name = allowed_action.name.lower() + forbidden_actions = self.all_actions.difference({allowed_action}) + + for forbidden_action in forbidden_actions: + msg = f'action 
{forbidden_action.name} is forbidden on method `{method_name}`' + ctx = self._get_context(forbidden_action) + with pytest.raises(NCForbiddenAction, match=msg): + runner.call_public_method(self.contract_id, method_name, ctx) + + def test_allow_specific_action_on_fallback(self) -> None: + for allowed_action in self.all_actions: + class MyOtherBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @fallback(allow_actions=[allowed_action.type]) + def fallback(self, ctx: Context, method_name: str, nc_args: NCArgs) -> None: + pass + + runner = self.build_runner() + blueprint_id = self.gen_random_blueprint_id() + self.register_blueprint_class(blueprint_id, MyOtherBlueprint) + runner.create_contract(self.contract_id, blueprint_id, self._get_context()) + method_name = allowed_action.name.lower() + forbidden_actions = self.all_actions.difference({allowed_action}) + + for forbidden_action in forbidden_actions: + msg = f'action {forbidden_action.name} is forbidden on method `fallback`' + ctx = self._get_context(forbidden_action) + with pytest.raises(NCForbiddenAction, match=msg): + runner.call_public_method(self.contract_id, method_name, ctx) diff --git a/tests/nanocontracts/test_blueprint.py b/tests/nanocontracts/test_blueprint.py new file mode 100644 index 000000000..c6f75a542 --- /dev/null +++ b/tests/nanocontracts/test_blueprint.py @@ -0,0 +1,326 @@ +from hathor.nanocontracts.blueprint import Blueprint +from hathor.nanocontracts.context import Context +from hathor.nanocontracts.exception import BlueprintSyntaxError, NCFail, NCInsufficientFunds, NCViewMethodError +from hathor.nanocontracts.nc_types import make_nc_type_for_type +from hathor.nanocontracts.storage import NCBlockStorage, NCMemoryStorageFactory +from hathor.nanocontracts.storage.backends import MemoryNodeTrieStore +from hathor.nanocontracts.storage.contract_storage import Balance, BalanceKey +from hathor.nanocontracts.storage.patricia_trie import PatriciaTrie +from 
hathor.nanocontracts.types import ( + Address, + BlueprintId, + ContractId, + NCDepositAction, + NCWithdrawalAction, + TokenUid, + VertexId, + public, + view, +) +from tests import unittest +from tests.nanocontracts.utils import TestRunner + +STR_NC_TYPE = make_nc_type_for_type(str) +BYTES_NC_TYPE = make_nc_type_for_type(bytes) +INT_NC_TYPE = make_nc_type_for_type(int) +BOOL_NC_TYPE = make_nc_type_for_type(bool) + +MOCK_ADDRESS = Address(b'') + + +class SimpleFields(Blueprint): + a: str + b: bytes + c: int + d: bool + + @public + def initialize(self, ctx: Context, a: str, b: bytes, c: int, d: bool) -> None: + self.a = a + self.b = b + self.c = c + self.d = d + + # Read the content of the variable. + if self.a: + pass + + +class ContainerFields(Blueprint): + a: dict[str, str] + b: dict[str, bytes] + c: dict[str, int] + + def _set(self, _dict, key, value): + _dict[key] = value + assert key in _dict + assert _dict[key] == value + del _dict[key] + assert key not in _dict + _dict[key] = value + + @public + def initialize(self, ctx: Context, items: list[tuple[str, str, bytes, int]]) -> None: + for key, va, vb, vc in items: + self._set(self.a, key, va) + self._set(self.b, key, vb) + self._set(self.c, key, vc) + + +class MyBlueprint(Blueprint): + a: int + + @public + def initialize(self, ctx: Context) -> None: + self.a = 1 + + @public(allow_deposit=True, allow_withdrawal=True) + def nop(self, ctx: Context) -> None: + pass + + @public + def fail(self, ctx: Context) -> None: + self.a = 2 + raise NCFail() + self.a = 3 + + @view + def my_private_method_fail(self) -> None: + # This operation is not permitted because private methods + # cannot change the transaction state. 
+ self.a = 2 + + @view + def my_private_method_nop(self) -> int: + return 1 + + +class NCBlueprintTestCase(unittest.TestCase): + def setUp(self) -> None: + super().setUp() + self.simple_fields_id = ContractId(VertexId(b'1' * 32)) + self.container_fields_id = ContractId(VertexId(b'2' * 32)) + self.my_blueprint_id = ContractId(VertexId(b'3' * 32)) + + nc_storage_factory = NCMemoryStorageFactory() + store = MemoryNodeTrieStore() + block_trie = PatriciaTrie(store) + block_storage = NCBlockStorage(block_trie) + self.manager = self.create_peer('testnet') + self.runner = TestRunner( + self.manager.tx_storage, nc_storage_factory, block_storage, settings=self._settings, reactor=self.reactor + ) + + self.blueprint_ids: dict[str, BlueprintId] = { + 'simple_fields': BlueprintId(VertexId(b'a' * 32)), + 'container_fields': BlueprintId(VertexId(b'b' * 32)), + 'my_blueprint': BlueprintId(VertexId(b'c' * 32)), + } + + nc_catalog = self.manager.tx_storage.nc_catalog + nc_catalog.blueprints[self.blueprint_ids['simple_fields']] = SimpleFields + nc_catalog.blueprints[self.blueprint_ids['container_fields']] = ContainerFields + nc_catalog.blueprints[self.blueprint_ids['my_blueprint']] = MyBlueprint + + genesis = self.manager.tx_storage.get_all_genesis() + self.tx = [t for t in genesis if t.is_transaction][0] + + def test_simple_fields(self) -> None: + blueprint_id = self.blueprint_ids['simple_fields'] + nc_id = self.simple_fields_id + + ctx = Context([], self.tx, MOCK_ADDRESS, timestamp=0) + a = 'str' + b = b'bytes' + c = 123 + d = True + self.runner.create_contract(nc_id, blueprint_id, ctx, a, b, c, d) + + storage = self.runner.get_storage(nc_id) + self.assertEqual(storage.get_obj(b'a', STR_NC_TYPE), a) + self.assertEqual(storage.get_obj(b'b', BYTES_NC_TYPE), b) + self.assertEqual(storage.get_obj(b'c', INT_NC_TYPE), c) + self.assertEqual(storage.get_obj(b'd', BOOL_NC_TYPE), d) + + def test_container_fields(self) -> None: + blueprint_id = self.blueprint_ids['container_fields'] + nc_id = 
self.container_fields_id + + ctx = Context([], self.tx, MOCK_ADDRESS, timestamp=0) + items = [ + ('a', '1', b'1', 1), + ('b', '2', b'2', 2), + ('c', '3', b'3', 3), + ] + self.runner.create_contract(nc_id, blueprint_id, ctx, items) + + storage = self.runner.get_storage(nc_id) + self.assertEqual(storage.get_obj(b'a:\x01a', STR_NC_TYPE), '1') + self.assertEqual(storage.get_obj(b'a:\x01b', STR_NC_TYPE), '2') + self.assertEqual(storage.get_obj(b'a:\x01c', STR_NC_TYPE), '3') + + def _create_my_blueprint_contract(self) -> None: + blueprint_id = self.blueprint_ids['my_blueprint'] + nc_id = self.my_blueprint_id + ctx = Context([], self.tx, MOCK_ADDRESS, timestamp=0) + self.runner.create_contract(nc_id, blueprint_id, ctx) + + def test_public_method_fails(self) -> None: + self._create_my_blueprint_contract() + nc_id = self.my_blueprint_id + storage = self.runner.get_storage(nc_id) + + with self.assertRaises(NCFail): + ctx = Context([], self.tx, MOCK_ADDRESS, timestamp=0) + self.runner.call_public_method(nc_id, 'fail', ctx) + self.assertEqual(1, storage.get_obj(b'a', INT_NC_TYPE)) + + def test_private_method_change_state(self) -> None: + self._create_my_blueprint_contract() + nc_id = self.my_blueprint_id + with self.assertRaises(NCViewMethodError): + self.runner.call_view_method(nc_id, 'my_private_method_fail') + + def test_private_method_success(self) -> None: + self._create_my_blueprint_contract() + nc_id = self.my_blueprint_id + self.assertEqual(1, self.runner.call_view_method(nc_id, 'my_private_method_nop')) + + def test_initial_balance(self) -> None: + self._create_my_blueprint_contract() + nc_id = self.my_blueprint_id + storage = self.runner.get_storage(nc_id) + self.assertEqual(Balance(value=0, can_mint=False, can_melt=False), storage.get_balance(MOCK_ADDRESS)) + + def test_nop(self) -> None: + self._create_my_blueprint_contract() + nc_id = self.my_blueprint_id + ctx = Context([], self.tx, MOCK_ADDRESS, timestamp=0) + self.runner.call_public_method(nc_id, 'nop', ctx) + 
+ def test_withdrawal_fail(self) -> None: + self._create_my_blueprint_contract() + nc_id = self.my_blueprint_id + token_uid = TokenUid(b'\0') + ctx = Context( + [NCWithdrawalAction(token_uid=token_uid, amount=1)], + self.tx, + MOCK_ADDRESS, + timestamp=0, + ) + with self.assertRaises(NCInsufficientFunds): + self.runner.call_public_method(nc_id, 'nop', ctx) + + def test_deposits_and_withdrawals(self) -> None: + self._create_my_blueprint_contract() + nc_id = self.my_blueprint_id + storage = self.runner.get_storage(nc_id) + token_uid = TokenUid(b'\0') + ctx = Context( + [NCDepositAction(token_uid=token_uid, amount=100)], + self.tx, + MOCK_ADDRESS, + timestamp=0, + ) + self.runner.call_public_method(nc_id, 'nop', ctx) + self.assertEqual(Balance(value=100, can_mint=False, can_melt=False), storage.get_balance(token_uid)) + + ctx = Context( + [NCWithdrawalAction(token_uid=token_uid, amount=1)], + self.tx, + MOCK_ADDRESS, + timestamp=0, + ) + self.runner.call_public_method(nc_id, 'nop', ctx) + self.assertEqual(Balance(value=99, can_mint=False, can_melt=False), storage.get_balance(token_uid)) + + ctx = Context( + [NCWithdrawalAction(token_uid=token_uid, amount=50)], + self.tx, + MOCK_ADDRESS, + timestamp=0, + ) + self.runner.call_public_method(nc_id, 'nop', ctx) + self.assertEqual(Balance(value=49, can_mint=False, can_melt=False), storage.get_balance(token_uid)) + + ctx = Context( + [NCWithdrawalAction(token_uid=token_uid, amount=50)], + self.tx, + MOCK_ADDRESS, + timestamp=0, + ) + with self.assertRaises(NCInsufficientFunds): + self.runner.call_public_method(nc_id, 'nop', ctx) + + def test_withdraw_wrong_token(self) -> None: + self._create_my_blueprint_contract() + nc_id = self.my_blueprint_id + storage = self.runner.get_storage(nc_id) + + token_uid = TokenUid(b'\0') + wrong_token_uid = TokenUid(b'\1') + + ctx = Context( + [NCDepositAction(token_uid=token_uid, amount=100)], + self.tx, + MOCK_ADDRESS, + timestamp=0, + ) + self.runner.call_public_method(nc_id, 'nop', ctx) + 
self.assertEqual(Balance(value=100, can_mint=False, can_melt=False), storage.get_balance(token_uid)) + + ctx = Context( + [NCWithdrawalAction(token_uid=wrong_token_uid, amount=1)], + self.tx, + MOCK_ADDRESS, + timestamp=0, + ) + with self.assertRaises(NCInsufficientFunds): + self.runner.call_public_method(nc_id, 'nop', ctx) + self.assertEqual(Balance(value=100, can_mint=False, can_melt=False), storage.get_balance(token_uid)) + + def test_invalid_field(self) -> None: + with self.assertRaises(BlueprintSyntaxError): + class WrongBlueprint(Blueprint): + a: float + + @public + def initialize(self, ctx: Context) -> None: + self.a = 1.2 + + def test_balances(self) -> None: + self._create_my_blueprint_contract() + nc_id = self.my_blueprint_id + storage = self.runner.get_storage(nc_id) + + token_uid = TokenUid(b'\0') # HTR + ctx = Context( + [NCDepositAction(token_uid=token_uid, amount=100)], + self.tx, + MOCK_ADDRESS, + timestamp=0, + ) + self.runner.call_public_method(nc_id, 'nop', ctx) + self.assertEqual(Balance(value=100, can_mint=False, can_melt=False), storage.get_balance(token_uid)) + + token_uid2 = TokenUid(b'\0' + b'\1' * 31) + ctx = Context( + [NCDepositAction(token_uid=token_uid2, amount=200)], + self.tx, + MOCK_ADDRESS, + timestamp=0, + ) + self.runner.call_public_method(nc_id, 'nop', ctx) + self.assertEqual(Balance(value=200, can_mint=False, can_melt=False), storage.get_balance(token_uid2)) + + all_balances = storage.get_all_balances() + key1 = BalanceKey(nc_id, token_uid) + key2 = BalanceKey(nc_id, token_uid2) + + self.assertEqual( + all_balances, + { + key1: Balance(value=100, can_mint=False, can_melt=False), + key2: Balance(value=200, can_mint=False, can_melt=False), + } + ) diff --git a/tests/nanocontracts/test_blueprint_syntax.py b/tests/nanocontracts/test_blueprint_syntax.py new file mode 100644 index 000000000..ab3569a3e --- /dev/null +++ b/tests/nanocontracts/test_blueprint_syntax.py @@ -0,0 +1,567 @@ +# Copyright 2025 Hathor Labs +# +# Licensed under 
the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import re + +import pytest + +from hathor.nanocontracts import Blueprint, Context, public, view +from hathor.nanocontracts.exception import BlueprintSyntaxError +from hathor.nanocontracts.runner.types import NCArgs +from hathor.nanocontracts.types import Address, fallback +from tests.nanocontracts.blueprints.unittest import BlueprintTestCase + + +class TestBlueprintSyntax(BlueprintTestCase): + def setUp(self) -> None: + super().setUp() + + self.blueprint_id = self.gen_random_blueprint_id() + self.contract_id = self.gen_random_contract_id() + self.ctx = Context( + actions=[], + vertex=self.get_genesis_tx(), + address=Address(self.gen_random_address()), + timestamp=self.now, + ) + + def test_success(self) -> None: + class MyBlueprint(Blueprint): + a: str + + @public + def initialize(self, ctx: Context, a: int) -> int: + return a + + @view + def some_view(self, a: int) -> int: + return a + + @fallback + def fallback(self, ctx: Context, method_name: str, nc_args: NCArgs) -> int: + return 123 + + self.nc_catalog.blueprints[self.blueprint_id] = MyBlueprint + self.runner.create_contract(self.contract_id, self.blueprint_id, self.ctx, 123) + + def test_forbidden_field_name(self) -> None: + with pytest.raises(BlueprintSyntaxError, match='field name is forbidden: `log`'): + class MyBlueprint(Blueprint): + log: str # type: ignore + + @public + def initialize(self, ctx: Context) -> None: + pass + + def test_field_name_with_underscore(self) -> None: + 
with pytest.raises(BlueprintSyntaxError, match='field name cannot start with underscore: `_a`'): + class MyBlueprint(Blueprint): + _a: str + + @public + def initialize(self, ctx: Context) -> None: + pass + + def test_field_with_default(self) -> None: + with pytest.raises(BlueprintSyntaxError, match='fields with default values are currently not supported: `a`'): + class MyBlueprint(Blueprint): + a: str = 'a' + + @public + def initialize(self, ctx: Context) -> None: + pass + + def test_no_initialize(self) -> None: + with pytest.raises(BlueprintSyntaxError, match='blueprints require a method called `initialize`'): + class MyBlueprint(Blueprint): + pass + + def test_initialize_non_public(self) -> None: + with pytest.raises(BlueprintSyntaxError, match='`initialize` method must be annotated with @public'): + class MyBlueprint(Blueprint): + def initialize(self, ctx: Context) -> None: + pass + + def test_initialize_view(self) -> None: + with pytest.raises(BlueprintSyntaxError, match='`initialize` method cannot be annotated with @view'): + class MyBlueprint(Blueprint): + @view + def initialize(self, ctx: Context) -> None: + pass + + def test_initialize_fallback(self) -> None: + msg = '@fallback method must be called `fallback`: `initialize()`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @fallback + def initialize(self, ctx: Context) -> None: + pass + + def test_public_missing_self(self) -> None: + msg = '@public method must have `self` argument: `initialize()`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize() -> None: # type: ignore + pass + + def test_public_wrong_self(self) -> None: + msg = '@public method first argument must be called `self`: `initialize()`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(wrong) -> None: + pass + + def test_public_typed_self(self) 
-> None: + msg = '@public method `self` argument must not be typed: `initialize()`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self: int) -> None: # type: ignore + pass + + def test_view_missing_self(self) -> None: + msg = '@view method must have `self` argument: `nop()`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @view + def nop() -> None: # type: ignore + pass + + def test_view_wrong_self(self) -> None: + msg = '@view method first argument must be called `self`: `nop()`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @view + def nop(wrong) -> None: + pass + + def test_view_typed_self(self) -> None: + msg = '@view method `self` argument must not be typed: `nop()`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @view + def nop(self: int) -> None: # type: ignore + pass + + def test_fallback_missing_self(self) -> None: + msg = '@fallback method must have `self` argument: `fallback()`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @fallback + def fallback() -> None: # type: ignore + pass + + def test_fallback_wrong_self(self) -> None: + msg = '@fallback method first argument must be called `self`: `fallback()`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @fallback + def fallback(wrong) -> None: + pass + + def test_fallback_typed_self(self) -> None: + msg = '@fallback method `self` argument must not be 
typed: `fallback()`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @fallback + def fallback(self: int) -> None: # type: ignore + pass + + def test_public_missing_context(self) -> None: + msg = '@public method must have `Context` argument: `initialize()`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self) -> None: + pass + + def test_public_context_different_name_success(self) -> None: + class MyBlueprint(Blueprint): + @public + def initialize(self, context: Context) -> None: + pass + + self.nc_catalog.blueprints[self.blueprint_id] = MyBlueprint + self.runner.create_contract(self.contract_id, self.blueprint_id, self.ctx) + + def test_public_context_untyped(self) -> None: + msg = 'argument `ctx` on method `initialize` must be typed' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx) -> None: # type: ignore + pass + + def test_public_context_wrong_type(self) -> None: + msg = '@public method second arg `ctx` argument must be of type `Context`: `initialize()`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: int) -> None: + pass + + def test_fallback_missing_context(self) -> None: + msg = '@fallback method must have `Context` argument: `fallback()`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @fallback + def fallback(self) -> None: + pass + + def test_fallback_context_untyped(self) -> None: + msg = 'argument `ctx` on method `fallback` must be typed' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: 
Context) -> None: + pass + + @fallback + def fallback(self, ctx) -> None: # type: ignore + pass + + def test_fallback_context_wrong_type(self) -> None: + msg = '@fallback method second arg `ctx` argument must be of type `Context`: `fallback()`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @fallback + def fallback(self, ctx: int) -> None: + pass + + def test_view_with_ctx(self) -> None: + msg = '@view method cannot have arg with type `Context`: `nop()`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @view + def nop(self, ctx: Context) -> None: + pass + + def test_view_with_context_type(self) -> None: + msg = '@view method cannot have arg with type `Context`: `nop()`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @view + def nop(self, a: int, b: Context) -> None: + pass + + def test_cannot_have_multiple_method_types1(self) -> None: + msg = 'method must be annotated with at most one method type: `nop()`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @public + @view + def nop(self) -> None: + pass + + def test_cannot_have_multiple_method_types2(self) -> None: + msg = 'method must be annotated with at most one method type: `nop()`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @fallback + @view + def nop(self) -> None: + pass + + def test_invalid_field_type(self) -> None: + msg = 'unsupported field type `float` on field `a`' + with pytest.raises(BlueprintSyntaxError, 
match=re.escape(msg)): + class MyBlueprint(Blueprint): + a: float + + @public + def initialize(self, ctx: Context) -> None: + pass + + def test_public_missing_arg_type(self) -> None: + msg = 'argument `a` on method `initialize` must be typed' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context, a) -> None: # type: ignore + pass + + # TODO + @pytest.mark.skip(reason='code commented on nanocontracts/types.py') + def test_public_invalid_arg_type(self) -> None: + msg = 'unsupported type `float` on argument `a` of method `initialize`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context, a: float) -> None: + pass + + def test_public_missing_return_type(self) -> None: + msg = 'missing return type on method `initialize`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context): # type: ignore + pass + + # TODO + @pytest.mark.skip(reason='code commented on nanocontracts/types.py') + def test_public_invalid_return_type(self) -> None: + msg = 'unsupported return type `float` on method `initialize`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> float: + return 0 + + def test_view_missing_arg_type(self) -> None: + msg = 'argument `a` on method `nop` must be typed' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @view + def nop(self, a) -> None: # type: ignore + pass + + # TODO + @pytest.mark.skip(reason='code commented on nanocontracts/types.py') + def test_view_invalid_arg_type(self) -> None: + msg = 'unsupported type `float` on argument `a` of method `nop`' + with 
pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @view + def nop(self, a: float) -> None: + pass + + def test_view_missing_return_type(self) -> None: + msg = 'missing return type on method `nop`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @view + def nop(self): + pass + + # TODO + @pytest.mark.skip(reason='code commented on nanocontracts/types.py') + def test_view_invalid_return_type(self) -> None: + msg = 'unsupported return type `float` on method `nop`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @view + def nop(self) -> float: + return 0 + + def test_fallback_missing_args1(self) -> None: + msg = '@fallback method must have these args: `ctx: Context, method_name: str, nc_args: NCArgs`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @fallback + def fallback(self, ctx: Context) -> None: + pass + + def test_fallback_missing_args2(self) -> None: + msg = '@fallback method must have these args: `ctx: Context, method_name: str, nc_args: NCArgs`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @fallback + def fallback(self, ctx: Context, method_name: str) -> None: + pass + + def test_fallback_missing_arg_type1(self) -> None: + msg = 'argument `method_name` on method `fallback` must be typed' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @fallback + def fallback(self, ctx: 
Context, method_name, args_bytes: bytes) -> None: # type: ignore + pass + + def test_fallback_missing_arg_type2(self) -> None: + msg = 'argument `args_bytes` on method `fallback` must be typed' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @fallback + def fallback(self, ctx: Context, method_name: str, args_bytes) -> None: # type: ignore + pass + + def test_fallback_wrong_arg_type1(self) -> None: + msg = '@fallback method must have these args: `ctx: Context, method_name: str, nc_args: NCArgs`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @fallback + def fallback(self, ctx: Context, method_name: int, args_bytes: bytes) -> None: + pass + + def test_fallback_wrong_arg_type2(self) -> None: + msg = '@fallback method must have these args: `ctx: Context, method_name: str, nc_args: NCArgs`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @fallback + def fallback(self, ctx: Context, method_name: str, args_bytes: int) -> None: + pass + + def test_fallback_missing_return_type(self) -> None: + msg = 'missing return type on method `fallback`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @fallback + def fallback(self, ctx: Context, method_name: str, nc_args: NCArgs): # type: ignore + pass + + # TODO + @pytest.mark.skip(reason='code commented on nanocontracts/types.py') + def test_fallback_invalid_return_type(self) -> None: + msg = 'unsupported return type `float` on method `nop`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: 
Context) -> None: + pass + + @fallback + def fallback(self, ctx: Context, method_name: str, nc_args: NCArgs) -> float: + return 0 + + def test_fallback_wrong_name(self) -> None: + msg = '@fallback method must be called `fallback`: `wrong()`' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @fallback + def wrong(self) -> None: + pass + + def test_fallback_not_annotated(self) -> None: + msg = '`fallback` method must be annotated with @fallback' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + def fallback(self) -> None: + pass + + def test_fallback_view(self) -> None: + msg = '`fallback` method cannot be annotated with @view' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @view + def fallback(self) -> None: + pass + + def test_fallback_public(self) -> None: + msg = '`fallback` method cannot be annotated with @public' + with pytest.raises(BlueprintSyntaxError, match=re.escape(msg)): + class MyBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass + + @public + def fallback(self) -> None: + pass diff --git a/tests/nanocontracts/test_consensus.py b/tests/nanocontracts/test_consensus.py new file mode 100644 index 000000000..ab2982dea --- /dev/null +++ b/tests/nanocontracts/test_consensus.py @@ -0,0 +1,1412 @@ +from typing import Any, cast + +from hathor.conf import HathorSettings +from hathor.crypto.util import get_address_from_public_key_bytes +from hathor.exception import InvalidNewTransaction +from hathor.nanocontracts import Blueprint, Context, public +from hathor.nanocontracts.catalog import NCBlueprintCatalog +from hathor.nanocontracts.exception import NCFail, NCInvalidSignature +from 
hathor.nanocontracts.method import Method +from hathor.nanocontracts.nc_types import make_nc_type_for_type +from hathor.nanocontracts.storage.contract_storage import Balance +from hathor.nanocontracts.types import NCAction, NCActionType, NCDepositAction, NCWithdrawalAction, TokenUid +from hathor.nanocontracts.utils import sign_pycoin +from hathor.simulator.trigger import StopAfterMinimumBalance, StopAfterNMinedBlocks +from hathor.transaction import BaseTransaction, Block, Transaction, TxOutput +from hathor.transaction.headers import NanoHeader +from hathor.transaction.headers.nano_header import NanoHeaderAction +from hathor.transaction.nc_execution_state import NCExecutionState +from hathor.types import VertexId +from hathor.wallet.base_wallet import WalletOutputInfo +from tests.dag_builder.builder import TestDAGBuilder +from tests.simulation.base import SimulatorTestCase +from tests.utils import add_custom_tx, create_tokens, gen_custom_base_tx + +settings = HathorSettings() + +INT_NC_TYPE = make_nc_type_for_type(int) +TOKEN_NC_TYPE = make_nc_type_for_type(TokenUid) + + +class MyBlueprint(Blueprint): + total: int + token_uid: TokenUid + counter: int + + @public + def initialize(self, ctx: Context, token_uid: TokenUid) -> None: + self.total = 0 + self.counter = 0 + self.token_uid = token_uid + + def _get_action(self, ctx: Context) -> NCAction: + if len(ctx.actions) != 1: + raise NCFail('only one token allowed') + if self.token_uid not in ctx.actions: + raise NCFail('invalid token') + action = ctx.get_single_action(self.token_uid) + if action.token_uid != self.token_uid: + raise NCFail('invalid token') + return action + + @public + def nop(self, ctx: Context, a: int) -> None: + self.counter += 1 + + @public(allow_deposit=True) + def deposit(self, ctx: Context) -> None: + self.counter += 1 + action = self._get_action(ctx) + assert isinstance(action, NCDepositAction) + self.total += action.amount + + @public(allow_withdrawal=True) + def withdraw(self, ctx: Context) -> 
None: + self.counter += 1 + action = self._get_action(ctx) + assert isinstance(action, NCWithdrawalAction) + self.total -= action.amount + + @public + def fail_on_zero(self, ctx: Context) -> None: + if self.counter == 0: + raise NCFail('counter is zero') + + +class NCConsensusTestCase(SimulatorTestCase): + __test__ = True + + def setUp(self): + super().setUp() + + self.myblueprint_id = b'x' * 32 + self.catalog = NCBlueprintCatalog({ + self.myblueprint_id: MyBlueprint + }) + self.nc_seqnum = 0 + + self.manager = self.simulator.create_peer() + self.manager.allow_mining_without_peers() + self.manager.tx_storage.nc_catalog = self.catalog + + self.wallet = self.manager.wallet + + self.miner = self.simulator.create_miner(self.manager, hashpower=100e6) + self.miner.start() + + self.token_uid = TokenUid(b'\0') + trigger = StopAfterMinimumBalance(self.wallet, self.token_uid, 1) + self.assertTrue(self.simulator.run(7200, trigger=trigger)) + + def assertNoBlocksVoided(self): + for blk in self.manager.tx_storage.get_all_transactions(): + if not blk.is_block: + continue + meta = blk.get_metadata() + self.assertIsNone(meta.voided_by) + + def _gen_nc_tx( + self, + nc_id: VertexId, + nc_method: str, + nc_args: list[Any], + nc: BaseTransaction | None = None, + *, + address: str | None = None, + nc_actions: list[NanoHeaderAction] | None = None, + is_custom_token: bool = False, + ) -> Transaction: + method_parser = Method.from_callable(getattr(MyBlueprint, nc_method)) + + if nc is None: + nc = Transaction() + assert isinstance(nc, Transaction) + + nc_args_bytes = method_parser.serialize_args_bytes(nc_args) + + if address is None: + address = self.wallet.get_unused_address() + privkey = self.wallet.get_private_key(address) + + nano_header = NanoHeader( + tx=nc, + nc_seqnum=self.nc_seqnum, + nc_id=nc_id, + nc_method=nc_method, + nc_args_bytes=nc_args_bytes, + nc_address=b'', + nc_script=b'', + nc_actions=nc_actions or [], + ) + nc.headers.append(nano_header) + self.nc_seqnum += 1 + + 
if is_custom_token: + nc.tokens = [self.token_uid] + + sign_pycoin(nano_header, privkey) + self._finish_preparing_tx(nc) + self.manager.reactor.advance(10) + return nc + + def _finish_preparing_tx(self, tx): + tx.timestamp = int(self.manager.reactor.seconds()) + tx.parents = self.manager.get_new_tx_parents() + tx.weight = self.manager.daa.minimum_tx_weight(tx) + return tx + + def _run_invalid_signature(self, attr, value, cause=NCInvalidSignature): + nc = self._gen_nc_tx(self.myblueprint_id, 'initialize', [self.token_uid]) + self.manager.cpu_mining_service.resolve(nc) + self.manager.on_new_tx(nc) + self.assertIsNone(nc.get_metadata().voided_by) + + tx = self._gen_nc_tx(nc.hash, 'deposit', []) + nano_header = tx.get_nano_header() + self.assertNotEqual(getattr(nano_header, attr), value) + setattr(nano_header, attr, value) + tx.weight = self.manager.daa.minimum_tx_weight(tx) + self.manager.cpu_mining_service.resolve(tx) + + tx.clear_sighash_cache() + with self.assertRaises(InvalidNewTransaction) as cm: + self.manager.on_new_tx(tx) + exc = cm.exception + self.assertIsInstance(exc.__cause__, cause) + + def test_nc_consensus_invalid_signature_change_nc_method(self): + self._run_invalid_signature('nc_method', 'withdraw') + + def test_nc_consensus_invalid_signature_change_nc_id(self): + self._run_invalid_signature('nc_id', b'y' * 32) + + def test_nc_consensus_invalid_signature_change_nc_args_bytes(self): + self._run_invalid_signature('nc_args_bytes', b'x') + + def test_nc_consensus_invalid_signature_change_nc_address_1(self): + self._run_invalid_signature('nc_address', b'x', cause=NCInvalidSignature) + + def test_nc_consensus_invalid_signature_change_nc_address_2(self): + privkey = self.wallet.get_key_at_index(100) + pubkey_bytes = privkey.sec() + address = get_address_from_public_key_bytes(pubkey_bytes) + self._run_invalid_signature('nc_address', address) + + def test_nc_consensus_execution_fails(self): + nc = self._gen_nc_tx(self.myblueprint_id, 'initialize', 
[self.token_uid]) + self.manager.cpu_mining_service.resolve(nc) + self.manager.on_new_tx(nc) + self.assertIsNone(nc.get_metadata().voided_by) + + tx = self._gen_nc_tx(nc.hash, 'deposit', []) + self.manager.cpu_mining_service.resolve(tx) + self.manager.on_new_tx(tx) + self.assertIsNone(tx.get_metadata().voided_by) + + trigger = StopAfterNMinedBlocks(self.miner, quantity=2) + self.assertTrue(self.simulator.run(7200, trigger=trigger)) + + meta = tx.get_metadata() + self.assertIsNotNone(meta.first_block) + self.assertEqual(meta.voided_by, {tx.hash, settings.NC_EXECUTION_FAIL_ID}) + + # add another block that confirms tx + self._add_new_block(tx_parents=[ + tx.hash, + tx.parents[0], + ]) + + self.assertNoBlocksVoided() + + def test_nc_consensus_success_custom_token(self): + token_creation_tx = create_tokens(self.manager, mint_amount=100, use_genesis=False, propagate=False) + self._finish_preparing_tx(token_creation_tx) + token_creation_tx.timestamp += 1 + self.manager.cpu_mining_service.resolve(token_creation_tx) + self.manager.on_new_tx(token_creation_tx) + + self.token_uid = token_creation_tx.hash + self.test_nc_consensus_success(is_custom_token=True) + + def test_nc_consensus_success(self, *, is_custom_token: bool = False) -> None: + nc = self._gen_nc_tx(self.myblueprint_id, 'initialize', [self.token_uid]) + self.manager.cpu_mining_service.resolve(nc) + self.manager.on_new_tx(nc) + self.assertIsNone(nc.get_metadata().voided_by) + + nc_id = nc.hash + + trigger = StopAfterNMinedBlocks(self.miner, quantity=2) + self.assertTrue(self.simulator.run(14400, trigger=trigger)) + nc_loaded = self.manager.tx_storage.get_transaction(nc_id) + nc_loaded_meta = nc_loaded.get_metadata() + self.assertIsNotNone(nc_loaded_meta.first_block) + self.assertIsNone(nc_loaded_meta.voided_by) + + block_initialize = self.manager.tx_storage.get_best_block() + + nc_storage = self.manager.get_best_block_nc_storage(nc_id) + self.assertEqual(nc_storage.get_obj(b'token_uid', TOKEN_NC_TYPE), 
self.token_uid) + + # Make a deposit. + + _inputs, deposit_amount = self.wallet.get_inputs_from_amount( + 1, self.manager.tx_storage, token_uid=self.token_uid + ) + tx = self.wallet.prepare_transaction(Transaction, _inputs, []) + tx = self._gen_nc_tx(nc_id, 'deposit', [], nc=tx, is_custom_token=is_custom_token, nc_actions=[ + NanoHeaderAction( + type=NCActionType.DEPOSIT, + token_index=1 if is_custom_token else 0, + amount=deposit_amount, + ) + ]) + self.manager.cpu_mining_service.resolve(tx) + self.manager.on_new_tx(tx) + self.assertIsNone(tx.get_metadata().voided_by) + + trigger = StopAfterNMinedBlocks(self.miner, quantity=2) + self.assertTrue(self.simulator.run(7200, trigger=trigger)) + + meta = tx.get_metadata() + self.assertIsNotNone(meta.first_block) + self.assertIsNone(meta.voided_by) + + block_deposit = self.manager.tx_storage.get_best_block() + + nc_storage = self.manager.get_best_block_nc_storage(nc_id) + self.assertEqual( + Balance(value=deposit_amount, can_mint=False, can_melt=False), + nc_storage.get_balance(self.token_uid) + ) + + # Make a withdrawal of 1 HTR. 
+ + _output_token_index = 0 + _tokens = [] + if is_custom_token: + _tokens.append(self.token_uid) + _output_token_index = 1 + + tx2 = Transaction(outputs=[TxOutput(1, b'', _output_token_index)]) + tx2.tokens = _tokens + tx2 = self._gen_nc_tx(nc_id, 'withdraw', [], nc=tx2, nc_actions=[ + NanoHeaderAction( + type=NCActionType.WITHDRAWAL, + token_index=1 if is_custom_token else 0, + amount=1, + ) + ]) + self.manager.cpu_mining_service.resolve(tx2) + self.manager.on_new_tx(tx2) + self.assertIsNone(tx2.get_metadata().voided_by) + + trigger = StopAfterNMinedBlocks(self.miner, quantity=2) + self.assertTrue(self.simulator.run(7200, trigger=trigger)) + + meta2 = tx2.get_metadata() + self.assertIsNotNone(meta2.first_block) + self.assertIsNone(meta2.voided_by) + + nc_storage = self.manager.get_best_block_nc_storage(nc_id) + self.assertEqual( + Balance(value=deposit_amount - 1, can_mint=False, can_melt=False), + nc_storage.get_balance(self.token_uid) + ) + + # Make a withdrawal of the remainder. + + tx3 = Transaction(outputs=[TxOutput(deposit_amount - 2, b'', _output_token_index)]) + tx3.tokens = _tokens + tx3 = self._gen_nc_tx(nc_id, 'withdraw', [], nc=tx3, nc_actions=[ + NanoHeaderAction( + type=NCActionType.WITHDRAWAL, + token_index=1 if is_custom_token else 0, + amount=deposit_amount - 2, + ) + ]) + self.manager.cpu_mining_service.resolve(tx3) + self.manager.on_new_tx(tx3) + self.assertIsNone(tx3.get_metadata().voided_by) + + trigger = StopAfterNMinedBlocks(self.miner, quantity=2) + self.assertTrue(self.simulator.run(7200, trigger=trigger)) + + meta3 = tx3.get_metadata() + self.assertIsNotNone(meta3.first_block) + self.assertIsNone(meta3.voided_by) + + nc_storage = self.manager.get_best_block_nc_storage(nc_id) + self.assertEqual(Balance(value=1, can_mint=False, can_melt=False), nc_storage.get_balance(self.token_uid)) + + # Try to withdraw more than available, so it fails. 
+ + _output_token_index = 0 + _tokens = [] + if is_custom_token: + _tokens.append(self.token_uid) + _output_token_index = 1 + + tx4 = Transaction(outputs=[TxOutput(2, b'', _output_token_index)]) + tx4.tokens = _tokens + tx4 = self._gen_nc_tx(nc_id, 'withdraw', [], nc=tx4, nc_actions=[ + NanoHeaderAction( + type=NCActionType.WITHDRAWAL, + token_index=1 if is_custom_token else 0, + amount=2, + ) + ]) + self.manager.cpu_mining_service.resolve(tx4) + self.manager.on_new_tx(tx4) + self.assertIsNone(tx4.get_metadata().voided_by) + + trigger = StopAfterNMinedBlocks(self.miner, quantity=2) + self.assertTrue(self.simulator.run(7200, trigger=trigger)) + + meta4 = tx4.get_metadata() + self.assertIsNotNone(meta4.first_block) + self.assertEqual(meta4.voided_by, {tx4.hash, settings.NC_EXECUTION_FAIL_ID}) + + nc_storage = self.manager.get_best_block_nc_storage(nc_id) + self.assertEqual(Balance(value=1, can_mint=False, can_melt=False), nc_storage.get_balance(self.token_uid)) + + self.assertNoBlocksVoided() + + # Check balance at different blocks + + nc_storage = self.manager.get_nc_storage(block_initialize, nc_id) + self.assertEqual(Balance(value=0, can_mint=False, can_melt=False), nc_storage.get_balance(self.token_uid)) + + nc_storage = self.manager.get_nc_storage(block_deposit, nc_id) + self.assertEqual( + Balance(value=deposit_amount, can_mint=False, can_melt=False), + nc_storage.get_balance(self.token_uid) + ) + + def test_nc_consensus_failure_voided_by_propagation(self): + nc = self._gen_nc_tx(self.myblueprint_id, 'initialize', [self.token_uid]) + self.manager.cpu_mining_service.resolve(nc) + self.manager.on_new_tx(nc) + self.assertIsNone(nc.get_metadata().voided_by) + + # Find some blocks. + self.assertTrue(self.simulator.run(600)) + + # tx1 is a NanoContract transaction and will fail execution. 
+ address = self.wallet.get_unused_address_bytes() + _outputs = [ + WalletOutputInfo(address, 1, None), + WalletOutputInfo(address, 1, None), + ] + tx1 = self.wallet.prepare_transaction_compute_inputs(Transaction, _outputs, self.manager.tx_storage) + tx1 = self._gen_nc_tx(nc.hash, 'deposit', [], nc=tx1) + self.manager.cpu_mining_service.resolve(tx1) + self.manager.on_new_tx(tx1) + self.assertIsNone(tx1.get_metadata().voided_by) + + # add tx21 spending tx1 in mempool before tx1 has been executed + tx21 = add_custom_tx(self.manager, tx_inputs=[(tx1, 0)]) + tx21_meta = tx21.get_metadata() + self.assertIsNone(tx21_meta.voided_by) + + # add tx22 with tx1 as parent in mempool before tx1 has been executed + address = self.wallet.get_unused_address_bytes() + _outputs = [ + WalletOutputInfo(address, 1, None), + ] + tx22 = self.wallet.prepare_transaction_compute_inputs(Transaction, _outputs, self.manager.tx_storage) + self._finish_preparing_tx(tx22) + tx22.parents[0] = tx1.hash + self.manager.cpu_mining_service.resolve(tx22) + self.manager.on_new_tx(tx22) + tx22_meta = tx22.get_metadata() + self.assertIsNone(tx22_meta.voided_by) + + # executes tx1 and asserts the final state + trigger = StopAfterNMinedBlocks(self.miner, quantity=2) + self.assertTrue(self.simulator.run(7200, trigger=trigger)) + + # confirm that tx1 failed execution. + meta = tx1.get_metadata() + self.assertIsNotNone(meta.first_block) + self.assertEqual(meta.voided_by, {settings.NC_EXECUTION_FAIL_ID, tx1.hash}) + + # tx21 must be voided because it spends an input from tx and tx failed execution. 
+ self.assertEqual(tx21_meta.voided_by, {tx1.hash}) + + # tx22 will not be voided because it just verifies tx1 + tx22_meta = tx22.get_metadata() + self.assertIsNone(tx22_meta.voided_by) + + # add tx31 spending tx1 in mempool after tx1 has been executed + tx31 = add_custom_tx(self.manager, tx_inputs=[(tx1, 1)]) + tx31_meta = tx31.get_metadata() + self.assertEqual(tx31_meta.voided_by, {tx1.hash}) + + # add tx32 spending tx22 in mempool after tx1 has been executed + tx32 = add_custom_tx(self.manager, tx_inputs=[(tx22, 0)]) + self.assertIn(tx1.hash, tx32.parents) + tx32_meta = tx32.get_metadata() + self.assertIsNone(tx32_meta.voided_by) + + # add tx33 in mempool, it spends tx1 with conflict after tx1 has been executed + tx33 = add_custom_tx(self.manager, tx_inputs=[(tx1, 0)]) + tx33_meta = tx33.get_metadata() + self.assertEqual(tx33_meta.voided_by, {tx1.hash, tx33.hash}) + + # confirm that tx1 inputs are unspent (i.e., they are still UTXOs). + tx1in = tx1.inputs[0] + tx1_spent_tx = self.manager.tx_storage.get_transaction(tx1in.tx_id) + tx1_spent_idx = tx1in.index + tx34 = add_custom_tx(self.manager, tx_inputs=[(tx1_spent_tx, tx1_spent_idx)]) + tx34_meta = tx34.get_metadata() + self.assertIsNone(tx34_meta.voided_by) + + self.assertNoBlocksVoided() + + def test_nc_consensus_chain_fail(self): + nc = self._gen_nc_tx(self.myblueprint_id, 'initialize', [self.token_uid]) + self.manager.cpu_mining_service.resolve(nc) + self.manager.on_new_tx(nc) + self.assertIsNone(nc.get_metadata().voided_by) + + # Find some blocks. + self.assertTrue(self.simulator.run(600)) + + # tx1 is a NanoContract transaction and will fail execution. 
+ address = self.wallet.get_unused_address_bytes() + _outputs = [ + WalletOutputInfo(address, 1, None), + WalletOutputInfo(address, 1, None), + ] + tx1 = self.wallet.prepare_transaction_compute_inputs(Transaction, _outputs, self.manager.tx_storage) + tx1 = self._gen_nc_tx(nc.hash, 'deposit', [], nc=tx1) + self.manager.cpu_mining_service.resolve(tx1) + + # tx2 is a NanoContract transaction independent of tx1 + tx2 = self._gen_nc_tx(nc.hash, 'nop', [1]) + self.manager.cpu_mining_service.resolve(tx2) + + # propagate both tx1 and tx2 + self.assertTrue(self.manager.on_new_tx(tx1)) + self.assertTrue(self.manager.on_new_tx(tx2)) + + # tx3 is a NanoContract transaction that has tx1 as parent + tx3 = self._gen_nc_tx(nc.hash, 'nop', [1]) + if tx1.hash not in tx3.parents: + tx3.parents[0] = tx1.hash + tx3.timestamp += 1 + self.manager.cpu_mining_service.resolve(tx3) + self.assertTrue(self.manager.on_new_tx(tx3)) + + # tx4 is a NanoContract transaction that spents tx1 output. + tx4 = gen_custom_base_tx(self.manager, tx_inputs=[(tx1, 0)]) + self._gen_nc_tx(nc.hash, 'nop', [1], nc=tx4) + tx4.timestamp += 2 + # self.assertNotIn(tx1.hash, tx4.parents) + self.manager.cpu_mining_service.resolve(tx4) + self.assertTrue(self.manager.on_new_tx(tx4)) + + # tx5 is a NanoContract transaction that spents tx4 output. + tx5 = gen_custom_base_tx(self.manager, tx_inputs=[(tx4, 0)]) + self._gen_nc_tx(nc.hash, 'nop', [1], nc=tx5) + tx5.timestamp += 3 + # self.assertNotIn(tx1.hash, tx5.parents) + self.manager.cpu_mining_service.resolve(tx5) + self.assertTrue(self.manager.on_new_tx(tx5)) + + # execute all transactions. 
+ trigger = StopAfterNMinedBlocks(self.miner, quantity=2) + self.assertTrue(self.simulator.run(7200, trigger=trigger)) + + # assert state after execution (tx1 fails, tx2 executes) + self.assertEqual(tx1.get_metadata().voided_by, {tx1.hash, settings.NC_EXECUTION_FAIL_ID}) + self.assertIsNone(tx2.get_metadata().voided_by) + self.assertIsNone(tx3.get_metadata().voided_by) + self.assertEqual(tx4.get_metadata().voided_by, {tx1.hash}) + self.assertEqual(tx5.get_metadata().voided_by, {tx1.hash}) + + nc_storage = self.manager.get_best_block_nc_storage(nc.hash) + self.assertEqual(2, nc_storage.get_obj(b'counter', INT_NC_TYPE)) + + def _add_new_block(self, + *, + parents: list[VertexId] | None = None, + tx_parents: list[VertexId] | None = None, + parent_block_hash: VertexId | None = None) -> Block: + if parents: + assert len(parents) == 3 + assert parent_block_hash is None + assert tx_parents is None + parent_block_hash = parents[0] + tx_parents = parents[1:] + block = self.manager.generate_mining_block(parent_block_hash=parent_block_hash) + if tx_parents is not None: + assert len(tx_parents) == 2 + block.parents[1] = tx_parents[0] + block.parents[2] = tx_parents[1] + self.manager.cpu_mining_service.resolve(block) + self.manager.propagate_tx(block) + return block + + def test_nc_consensus_reorg(self): + nc = self._gen_nc_tx(self.myblueprint_id, 'initialize', [self.token_uid]) + self.manager.cpu_mining_service.resolve(nc) + self.manager.on_new_tx(nc) + self.assertIsNone(nc.get_metadata().voided_by) + + nc_id = nc.hash + + # Find some blocks. + self.assertTrue(self.simulator.run(600)) + + # Generate two addresses. + address1 = self.wallet.get_address(self.wallet.get_key_at_index(0)) + address2 = self.wallet.get_address(self.wallet.get_key_at_index(1)) + self.assertNotEqual(address1, address2) + + # Prepare three sibling transactions. 
+ _inputs, deposit_amount_1 = self.wallet.get_inputs_from_amount(6500, self.manager.tx_storage) + tx1 = self.wallet.prepare_transaction(Transaction, _inputs, []) + tx1 = self._gen_nc_tx(nc_id, 'deposit', [], nc=tx1, address=address1, nc_actions=[ + NanoHeaderAction( + type=NCActionType.DEPOSIT, + token_index=0, + amount=deposit_amount_1, + ) + ]) + self.manager.cpu_mining_service.resolve(tx1) + + self.manager.reactor.advance(10) + + withdrawal_amount_1 = deposit_amount_1 - 100 + tx11 = Transaction(outputs=[TxOutput(withdrawal_amount_1, b'', 0)]) + tx11 = self._gen_nc_tx(nc_id, 'withdraw', [], nc=tx11, address=address1, nc_actions=[ + NanoHeaderAction( + type=NCActionType.WITHDRAWAL, + token_index=0, + amount=withdrawal_amount_1, + ) + ]) + tx11.weight += 1 + self.manager.cpu_mining_service.resolve(tx11) + + self.manager.reactor.advance(10) + + _inputs, deposit_amount_2 = self.wallet.get_inputs_from_amount(3, self.manager.tx_storage) + tx2 = self.wallet.prepare_transaction(Transaction, _inputs, []) + tx2 = self._gen_nc_tx(nc_id, 'deposit', [], nc=tx2, address=address2, nc_actions=[ + NanoHeaderAction( + type=NCActionType.DEPOSIT, + token_index=0, + amount=deposit_amount_2, + ) + ]) + tx2.weight += 1 + self.manager.cpu_mining_service.resolve(tx2) + + self.assertGreater(deposit_amount_1, deposit_amount_2) + self.assertGreater(withdrawal_amount_1, deposit_amount_2) + + # Propagate tx1, tx2, and tx11. + self.manager.on_new_tx(tx1) + self.manager.on_new_tx(tx2) + self.manager.on_new_tx(tx11) + + # Add a block that executes tx1 and tx11 (but not tx2). 
+ blk10 = self._add_new_block(tx_parents=[ + tx1.hash, + tx1.parents[0], + ]) + blk_base_hash = blk10.parents[0] + + blk11 = self._add_new_block(tx_parents=[ + tx1.hash, + tx11.hash, + ]) + + self.assertEqual(tx1.get_metadata().first_block, blk10.hash) + self.assertIsNone(tx2.get_metadata().first_block) + self.assertEqual(tx11.get_metadata().first_block, blk11.hash) + + self.assertIsNone(tx1.get_metadata().voided_by) + self.assertIsNone(tx2.get_metadata().voided_by) + self.assertIsNone(tx11.get_metadata().voided_by) + + nc_storage = self.manager.get_best_block_nc_storage(nc_id) + self.assertEqual( + Balance(value=deposit_amount_1 - withdrawal_amount_1, can_mint=False, can_melt=False), + nc_storage.get_balance(self.token_uid) + ) + + # Cause a reorg that will execute tx2 and tx11 (but not tx1). + blk20 = self._add_new_block(parents=[ + blk_base_hash, + tx2.hash, + tx2.parents[0], + ]) + blk21 = self._add_new_block(parents=[ + blk20.hash, + tx2.hash, + tx11.hash, + ]) + self._add_new_block(parents=[ + blk21.hash, + blk21.parents[1], + blk21.parents[2], + ]) + + self.assertIsNone(tx1.get_metadata().first_block) + self.assertEqual(tx2.get_metadata().first_block, blk20.hash) + self.assertEqual(tx11.get_metadata().first_block, blk21.hash) + + self.assertIsNone(tx1.get_metadata().voided_by) + self.assertIsNone(tx2.get_metadata().voided_by) + self.assertEqual(tx11.get_metadata().voided_by, {tx11.hash, settings.NC_EXECUTION_FAIL_ID}) + + nc_storage = self.manager.get_best_block_nc_storage(nc_id) + self.assertEqual( + Balance(value=deposit_amount_2, can_mint=False, can_melt=False), + nc_storage.get_balance(self.token_uid) + ) + + def test_nc_consensus_reorg_fail_before_reorg(self): + nc = self._gen_nc_tx(self.myblueprint_id, 'initialize', [self.token_uid]) + self.manager.cpu_mining_service.resolve(nc) + self.manager.on_new_tx(nc) + self.assertIsNone(nc.get_metadata().voided_by) + + nc_id = nc.hash + + # Find some blocks. 
+ self.assertTrue(self.simulator.run(600)) + + # Generate two addresses. + address1 = self.wallet.get_address(self.wallet.get_key_at_index(0)) + address2 = self.wallet.get_address(self.wallet.get_key_at_index(1)) + self.assertNotEqual(address1, address2) + + # Prepare three sibling transactions. + _inputs, deposit_amount_2 = self.wallet.get_inputs_from_amount(6500, self.manager.tx_storage) + tx2 = self.wallet.prepare_transaction(Transaction, _inputs, []) + tx2 = self._gen_nc_tx(nc_id, 'deposit', [], nc=tx2, address=address2, nc_actions=[ + NanoHeaderAction( + type=NCActionType.DEPOSIT, + token_index=0, + amount=deposit_amount_2, + ) + ]) + self.manager.cpu_mining_service.resolve(tx2) + + self.manager.reactor.advance(10) + + withdrawal_amount_1 = deposit_amount_2 - 100 + tx11 = Transaction(outputs=[TxOutput(withdrawal_amount_1, b'', 0)]) + tx11 = self._gen_nc_tx(nc_id, 'withdraw', [], nc=tx11, address=address1, nc_actions=[ + NanoHeaderAction( + type=NCActionType.WITHDRAWAL, + token_index=0, + amount=withdrawal_amount_1, + ) + ]) + tx11.weight += 1 + self.manager.cpu_mining_service.resolve(tx11) + + self.manager.reactor.advance(10) + + _inputs, deposit_amount_1 = self.wallet.get_inputs_from_amount(1, self.manager.tx_storage) + tx1 = self.wallet.prepare_transaction(Transaction, _inputs, []) + tx1 = self._gen_nc_tx(nc_id, 'deposit', [], nc=tx1, address=address1, nc_actions=[ + NanoHeaderAction( + type=NCActionType.DEPOSIT, + token_index=0, + amount=deposit_amount_1, + ) + ]) + tx1.weight += 2 + self.manager.cpu_mining_service.resolve(tx1) + + self.assertGreater(deposit_amount_2, deposit_amount_1) + self.assertGreater(withdrawal_amount_1, deposit_amount_1) + + # Propagate tx1, tx2, and tx11. + self.manager.on_new_tx(tx1) + self.manager.on_new_tx(tx2) + self.manager.on_new_tx(tx11) + + # Add a block that executes tx1 and tx11 (but not tx2). 
+ blk10 = self._add_new_block(tx_parents=[ + tx1.hash, + tx11.hash, + ]) + blk_base_hash = blk10.parents[0] + + self.assertEqual(tx1.get_metadata().first_block, blk10.hash) + self.assertIsNone(tx2.get_metadata().first_block) + self.assertEqual(tx11.get_metadata().first_block, blk10.hash) + + self.assertIsNone(tx1.get_metadata().voided_by) + self.assertIsNone(tx2.get_metadata().voided_by) + self.assertEqual(tx11.get_metadata().voided_by, {tx11.hash, settings.NC_EXECUTION_FAIL_ID}) + + nc_storage = self.manager.get_best_block_nc_storage(nc_id) + self.assertEqual( + Balance(value=deposit_amount_1, can_mint=False, can_melt=False), + nc_storage.get_balance(self.token_uid) + ) + + # Cause a reorg that will execute tx2 and tx11 (but not tx1). + blk20 = self._add_new_block(parents=[ + blk_base_hash, + tx2.hash, + tx2.parents[0], + ]) + blk21 = self._add_new_block(parents=[ + blk20.hash, + tx2.hash, + tx11.hash, + ]) + + self.assertIsNone(tx1.get_metadata().first_block) + self.assertEqual(tx2.get_metadata().first_block, blk20.hash) + self.assertEqual(tx11.get_metadata().first_block, blk21.hash) + + self.assertIsNone(tx1.get_metadata().voided_by) + self.assertIsNone(tx2.get_metadata().voided_by) + self.assertIsNone(tx11.get_metadata().voided_by) + + nc_storage = self.manager.get_best_block_nc_storage(nc_id) + self.assertEqual( + Balance(value=deposit_amount_2 - withdrawal_amount_1, can_mint=False, can_melt=False), + nc_storage.get_balance(self.token_uid) + ) + + def _prepare_nc_consensus_conflict(self, *, conflict_with_nano: bool) -> tuple[Transaction, ...]: + nc = self._gen_nc_tx(self.myblueprint_id, 'initialize', [self.token_uid]) + self.manager.cpu_mining_service.resolve(nc) + self.manager.on_new_tx(nc) + self.assertIsNone(nc.get_metadata().voided_by) + + # Find some blocks. 
+ self.assertTrue(self.simulator.run(600)) + + # tx0 is a regular transaction with one output + address = self.wallet.get_unused_address_bytes() + _outputs = [ + WalletOutputInfo(address, 10, None), + ] + tx0 = self.wallet.prepare_transaction_compute_inputs(Transaction, _outputs, self.manager.tx_storage) + self._finish_preparing_tx(tx0) + self.manager.cpu_mining_service.resolve(tx0) + self.manager.reactor.advance(60) + + # tx1 is a NanoContract transaction and will fail execution. + tx1 = gen_custom_base_tx(self.manager, tx_inputs=[(tx0, 0)]) + self.assertEqual(len(tx1.outputs), 1) + tx1.outputs[0].value = 3 + tx1 = self._gen_nc_tx(nc.hash, 'deposit', [], nc=tx1, nc_actions=[ + NanoHeaderAction( + type=NCActionType.DEPOSIT, + token_index=0, + amount=tx0.outputs[0].value - 3, + ) + ]) + self.manager.cpu_mining_service.resolve(tx1) + + # tx2 is a NanoContract transaction that spends tx1. + tx2 = gen_custom_base_tx(self.manager, tx_inputs=[(tx1, 0)]) + tx2 = self._gen_nc_tx(nc.hash, 'nop', [1], nc=tx2) + self.manager.cpu_mining_service.resolve(tx2) + + # tx1b is in conflict with tx1 + if conflict_with_nano: + tx1b = gen_custom_base_tx(self.manager, tx_inputs=[(tx0, 0)]) + self._gen_nc_tx(nc.hash, 'nop', [1], nc=tx1b) + else: + tx1b = gen_custom_base_tx(self.manager, tx_inputs=[(tx0, 0)]) + self.manager.cpu_mining_service.resolve(tx1b) + + # propagate both tx1 and tx2 + self.assertTrue(self.manager.on_new_tx(tx0)) + self.assertTrue(self.manager.on_new_tx(tx1)) + self.assertTrue(self.manager.on_new_tx(tx1b)) + self.assertTrue(self.manager.on_new_tx(tx2)) + + return cast(tuple[Transaction, ...], (tx0, tx1, tx1b, tx2)) + + def _run_nc_consensus_conflict_block_voided_1(self, *, conflict_with_nano: bool) -> None: + tx0, tx1, tx1b, tx2 = self._prepare_nc_consensus_conflict(conflict_with_nano=conflict_with_nano) + + # this block must be voided because it confirms both tx1 and tx1b. 
+ block = self.manager.generate_mining_block() + block.parents = [ + block.parents[0], + tx1.hash, + tx1b.hash, + ] + self.manager.cpu_mining_service.resolve(block) + self.assertTrue(self.manager.on_new_tx(block)) + self.assertTrue(block.get_metadata().voided_by) + + def test_nc_consensus_conflict_block_voided_1(self) -> None: + self._run_nc_consensus_conflict_block_voided_1(conflict_with_nano=False) + + def test_nc_consensus_nano_conflict_block_voided_1(self) -> None: + self._run_nc_consensus_conflict_block_voided_1(conflict_with_nano=True) + + def _run_nc_consensus_conflict_block_voided_2(self, *, conflict_with_nano: bool) -> None: + tx0, tx1, tx1b, tx2 = self._prepare_nc_consensus_conflict(conflict_with_nano=conflict_with_nano) + + # this block will be executed. + b0 = self.manager.generate_mining_block() + b0.parents = [ + b0.parents[0], + tx1.hash, + tx2.hash, + ] + self.manager.cpu_mining_service.resolve(b0) + self.assertTrue(self.manager.on_new_tx(b0)) + self.assertIsNone(b0.get_metadata().voided_by) + + # this block will be voided because it confirms tx1b. + b1 = self.manager.generate_mining_block() + b1.parents = [ + b1.parents[0], + tx1b.hash, + tx1b.parents[0], + ] + self.manager.cpu_mining_service.resolve(b1) + self.assertTrue(self.manager.on_new_tx(b1)) + self.assertIsNotNone(b1.get_metadata().voided_by) + + def test_nc_consensus_conflict_block_voided_2(self) -> None: + self._run_nc_consensus_conflict_block_voided_2(conflict_with_nano=False) + + def test_nc_consensus_nano_conflict_block_voided_2(self) -> None: + self._run_nc_consensus_conflict_block_voided_2(conflict_with_nano=True) + + def _run_nc_consensus_conflict_block_executed_1(self, *, conflict_with_nano: bool) -> None: + tx0, tx1, tx1b, tx2 = self._prepare_nc_consensus_conflict(conflict_with_nano=conflict_with_nano) + + # this block will be confirmed first. 
+ b0 = self.manager.generate_mining_block() + b0.parents = [ + b0.parents[0], + tx1.hash, + tx2.hash, + ] + self.manager.cpu_mining_service.resolve(b0) + + # this block will cause a reorg. + b1 = self.manager.generate_mining_block() + b1.weight += 1 + b1.parents = [ + b1.parents[0], + tx1.hash, + tx2.hash, + ] + self.manager.cpu_mining_service.resolve(b1) + + self.assertTrue(self.manager.on_new_tx(b0)) + self.assertIsNone(b0.get_metadata().voided_by) + self.assertTrue(self.manager.on_new_tx(b1)) + self.assertIsNotNone(b0.get_metadata().voided_by) + self.assertIsNone(b1.get_metadata().voided_by) + self.assertIsNone(tx1.get_metadata().voided_by) + self.assertIsNone(tx2.get_metadata().voided_by) + self.assertIsNotNone(tx1b.get_metadata().voided_by) + + def test_nc_consensus_conflict_block_executed_1(self) -> None: + self._run_nc_consensus_conflict_block_executed_1(conflict_with_nano=False) + + def test_nc_consensus_nano_conflict_block_executed_1(self) -> None: + self._run_nc_consensus_conflict_block_executed_1(conflict_with_nano=True) + + def _run_nc_consensus_conflict_block_executed_2(self, *, conflict_with_nano: bool) -> None: + tx0, tx1, tx1b, tx2 = self._prepare_nc_consensus_conflict(conflict_with_nano=conflict_with_nano) + + # this block is executed. + b0 = self.manager.generate_mining_block() + b0.parents = [ + b0.parents[0], + tx1b.hash, + tx1b.parents[0], + ] + self.manager.cpu_mining_service.resolve(b0) + + # this block will cause a reorg. 
+ b1 = self.manager.generate_mining_block() + b1.weight += 1 + b1.parents = [ + b1.parents[0], + tx1.hash, + tx2.hash, + ] + self.manager.cpu_mining_service.resolve(b1) + + self.assertTrue(self.manager.on_new_tx(b0)) + self.assertIsNone(b0.get_metadata().voided_by) + self.assertIsNotNone(tx1.get_metadata().voided_by) + self.assertIsNotNone(tx2.get_metadata().voided_by) + self.assertIsNone(tx1b.get_metadata().voided_by) + + self.assertTrue(self.manager.on_new_tx(b1)) + self.assertIsNotNone(b0.get_metadata().voided_by) + self.assertIsNone(b1.get_metadata().voided_by) + self.assertIsNone(tx1.get_metadata().voided_by) + self.assertIsNone(tx2.get_metadata().voided_by) + self.assertIsNotNone(tx1b.get_metadata().voided_by) + + def test_nc_consensus_conflict_block_executed_2(self) -> None: + self._run_nc_consensus_conflict_block_executed_2(conflict_with_nano=False) + + def test_nc_consensus_nano_conflict_block_executed_2(self) -> None: + self._run_nc_consensus_conflict_block_executed_2(conflict_with_nano=True) + + def test_nc_consensus_voided_tx_at_mempool(self) -> None: + dag_builder = TestDAGBuilder.from_manager(self.manager) + vertices = dag_builder.build_from_str(f''' + blockchain genesis b[1..40] + b30 < dummy + + tx1.nc_id = "{self.myblueprint_id.hex()}" + tx1.nc_method = initialize("00") + + # tx2 will fail because it does not have a deposit + tx2.nc_id = tx1 + tx2.nc_method = deposit() + tx2.out[0] <<< tx3 + + # tx3 will be voided because tx2 failed execution + tx3.nc_id = tx1 + tx3.nc_method = nop(1) + + b31 --> tx1 + b32 --> tx2 + b33 --> tx3 + ''') + + for node, vertex in vertices.list: + print() + print(node.name) + print() + self.manager.on_new_tx(vertex) + + b31 = vertices.by_name['b31'].vertex + b32 = vertices.by_name['b32'].vertex + b33 = vertices.by_name['b33'].vertex + + self.assertIsInstance(b31, Block) + self.assertIsInstance(b32, Block) + self.assertIsInstance(b33, Block) + self.assertIsNone(b31.get_metadata().voided_by) + 
self.assertIsNone(b32.get_metadata().voided_by) + self.assertIsNone(b33.get_metadata().voided_by) + + tx1 = vertices.by_name['tx1'].vertex + tx2 = vertices.by_name['tx2'].vertex + tx3 = vertices.by_name['tx3'].vertex + + meta1 = tx1.get_metadata() + meta2 = tx2.get_metadata() + meta3 = tx3.get_metadata() + + self.assertEqual(meta1.first_block, b31.hash) + self.assertEqual(meta2.first_block, b32.hash) + self.assertEqual(meta3.first_block, b33.hash) + + self.assertIsNone(meta1.voided_by) + self.assertEqual(meta2.voided_by, {tx2.hash, self._settings.NC_EXECUTION_FAIL_ID}) + self.assertEqual(meta3.voided_by, {tx2.hash}) + + def test_reexecute_fail_on_reorg_different_blocks(self) -> None: + dag_builder = TestDAGBuilder.from_manager(self.manager) + artifacts = dag_builder.build_from_str(f''' + blockchain genesis b[1..33] + blockchain b31 a[32..34] + b30 < dummy + + nc1.nc_id = "{self.myblueprint_id.hex()}" + nc1.nc_method = initialize("00") + + # nc2 will fail because it does not have a deposit + nc2.nc_id = nc1 + nc2.nc_method = deposit() + + # nc3 will be voided because nc2 failed execution + nc3.nc_id = nc1 + nc3.nc_method = nop(1) + nc2.out[0] <<< nc3 + + nc1 <-- b31 + nc2 <-- b32 + nc3 <-- b33 + + # a34 will generate a reorg, reexecuting nc2 (which fails again). + # nc2 and nc3 are in different blocks. 
+ b33 < a32 + nc2 <-- a32 + nc3 <-- a33 + ''') + + b31, b32, b33 = artifacts.get_typed_vertices(['b31', 'b32', 'b33'], Block) + a32, a33, a34 = artifacts.get_typed_vertices(['a32', 'a33', 'a34'], Block) + nc1, nc2, nc3 = artifacts.get_typed_vertices(['nc1', 'nc2', 'nc3'], Transaction) + + assert nc1.is_nano_contract() + assert nc2.is_nano_contract() + assert nc3.is_nano_contract() + + found_b33 = False + for node, vertex in artifacts.list: + assert self.manager.on_new_tx(vertex) + + if node.name == 'b33': + found_b33 = True + assert b33.get_metadata().voided_by is None + assert nc1.get_metadata().voided_by is None + assert nc2.get_metadata().voided_by == {nc2.hash, self._settings.NC_EXECUTION_FAIL_ID} + assert nc3.get_metadata().voided_by == {nc2.hash} + + assert nc1.get_metadata().first_block == b31.hash + assert nc2.get_metadata().first_block == b32.hash + assert nc3.get_metadata().first_block == b33.hash + + assert self.manager.get_nc_storage(b33, nc1.hash).get_obj(b'counter', INT_NC_TYPE) == 0 + + assert found_b33 + assert b33.get_metadata().voided_by == {b33.hash} + assert a34.get_metadata().voided_by is None + assert nc1.get_metadata().voided_by is None + assert nc2.get_metadata().voided_by == {nc2.hash, self._settings.NC_EXECUTION_FAIL_ID} + assert nc3.get_metadata().voided_by == {nc2.hash} + + assert nc1.get_metadata().first_block == b31.hash + assert nc2.get_metadata().first_block == a32.hash + assert nc3.get_metadata().first_block == a33.hash + + assert self.manager.get_nc_storage(a33, nc1.hash).get_obj(b'counter', INT_NC_TYPE) == 0 + + def test_reexecute_fail_on_reorg_same_block(self) -> None: + dag_builder = TestDAGBuilder.from_manager(self.manager) + artifacts = dag_builder.build_from_str(f''' + blockchain genesis b[1..33] + blockchain b31 a[32..34] + b30 < dummy + + nc1.nc_id = "{self.myblueprint_id.hex()}" + nc1.nc_method = initialize("00") + + # nc2 will fail because it does not have a deposit + nc2.nc_id = nc1 + nc2.nc_method = deposit() + + # nc3 
will be voided because nc2 failed execution + nc3.nc_id = nc1 + nc3.nc_method = nop(1) + nc2.out[0] <<< nc3 + + nc1 <-- b31 + nc2 <-- b32 + nc3 <-- b33 + + # a34 will generate a reorg, reexecuting nc2 (which fails again). + # nc2 and nc3 are in the same block. + b33 < a32 + nc2 <-- nc3 <-- a33 + ''') + + b31, b32, b33 = artifacts.get_typed_vertices(['b31', 'b32', 'b33'], Block) + a32, a33, a34 = artifacts.get_typed_vertices(['a32', 'a33', 'a34'], Block) + nc1, nc2, nc3 = artifacts.get_typed_vertices(['nc1', 'nc2', 'nc3'], Transaction) + + assert nc1.is_nano_contract() + assert nc2.is_nano_contract() + assert nc3.is_nano_contract() + + found_b33 = False + for node, vertex in artifacts.list: + assert self.manager.on_new_tx(vertex) + + if node.name == 'b33': + found_b33 = True + assert b33.get_metadata().voided_by is None + assert nc1.get_metadata().voided_by is None + assert nc2.get_metadata().voided_by == {nc2.hash, self._settings.NC_EXECUTION_FAIL_ID} + assert nc3.get_metadata().voided_by == {nc2.hash} + + assert nc1.get_metadata().first_block == b31.hash + assert nc2.get_metadata().first_block == b32.hash + assert nc3.get_metadata().first_block == b33.hash + + assert self.manager.get_nc_storage(b33, nc1.hash).get_obj(b'counter', INT_NC_TYPE) == 0 + + assert found_b33 + assert b33.get_metadata().voided_by == {b33.hash} + assert a34.get_metadata().voided_by is None + assert nc1.get_metadata().voided_by is None + assert nc2.get_metadata().voided_by == {nc2.hash, self._settings.NC_EXECUTION_FAIL_ID} + assert nc3.get_metadata().voided_by == {nc2.hash} + + assert nc1.get_metadata().first_block == b31.hash + assert nc2.get_metadata().first_block == a33.hash + assert nc3.get_metadata().first_block == a33.hash + + assert self.manager.get_nc_storage(a33, nc1.hash).get_obj(b'counter', INT_NC_TYPE) == 0 + + def test_reexecute_success_on_reorg_different_blocks(self) -> None: + dag_builder = TestDAGBuilder.from_manager(self.manager) + artifacts = dag_builder.build_from_str(f''' 
+ blockchain genesis b[1..33] + blockchain b31 a[32..34] + b30 < dummy + + nc1.nc_id = "{self.myblueprint_id.hex()}" + nc1.nc_method = initialize("00") + nc1.nc_address = wallet1 + nc1.nc_seqnum = 1 + + # nc2 will fail because nc1.counter is 0 + nc2.nc_id = nc1 + nc2.nc_method = fail_on_zero() + nc2.nc_address = wallet1 + nc2.nc_seqnum = 3 # we skip 2 because nc4 will use it below + + # nc3 will be voided because nc2 failed execution + nc3.nc_id = nc1 + nc3.nc_method = nop(1) + nc3.nc_address = wallet1 + nc3.nc_seqnum = 4 + nc2.out[0] <<< nc3 + + nc1 <-- b31 + nc2 <-- b32 + nc3 <-- b33 + + # a34 will generate a reorg, reexecuting nc2. + # this time it succeeds because nc4 in the new chain increments nc1.counter to 1, before nc2. + # nc2 and nc3 are in different blocks. + + nc4.nc_id = nc1 + nc4.nc_method = nop(1) + nc4.nc_address = wallet1 + nc4.nc_seqnum = 2 + nc4 < nc2 + nc4 <-- a32 + + b33 < a32 + nc2 <-- a32 + nc3 <-- a33 + ''') + + b31, b32, b33 = artifacts.get_typed_vertices(['b31', 'b32', 'b33'], Block) + a32, a33, a34 = artifacts.get_typed_vertices(['a32', 'a33', 'a34'], Block) + nc1, nc2, nc3, nc4 = artifacts.get_typed_vertices(['nc1', 'nc2', 'nc3', 'nc4'], Transaction) + + assert nc1.is_nano_contract() + assert nc2.is_nano_contract() + assert nc3.is_nano_contract() + assert nc4.is_nano_contract() + + artifacts.propagate_with(self.manager, up_to='b33') + + assert b33.get_metadata().voided_by is None + assert nc1.get_metadata().voided_by is None + assert nc2.get_metadata().voided_by == {nc2.hash, self._settings.NC_EXECUTION_FAIL_ID} + assert nc3.get_metadata().voided_by == {nc2.hash} + assert nc4.get_metadata().voided_by is None + + assert nc1.get_metadata().first_block == b31.hash + assert nc2.get_metadata().first_block == b32.hash + assert nc3.get_metadata().first_block == b33.hash + assert nc4.get_metadata().first_block is None + + assert self.manager.get_nc_storage(b33, nc1.hash).get_obj(b'counter', INT_NC_TYPE) == 0 + + 
artifacts.propagate_with(self.manager) + + assert b33.get_metadata().voided_by == {b33.hash} + assert a34.get_metadata().voided_by is None + assert nc1.get_metadata().voided_by is None + assert nc2.get_metadata().voided_by is None + assert nc3.get_metadata().voided_by is None + assert nc4.get_metadata().voided_by is None + + assert nc1.get_metadata().first_block == b31.hash + assert nc2.get_metadata().first_block == a32.hash + assert nc3.get_metadata().first_block == a33.hash + assert nc4.get_metadata().first_block == a32.hash + + # increments by nc4 and nc3 + assert self.manager.get_nc_storage(a33, nc1.hash).get_obj(b'counter', INT_NC_TYPE) == 2 + + def test_reexecute_success_on_reorg_same_block(self) -> None: + dag_builder = TestDAGBuilder.from_manager(self.manager) + artifacts = dag_builder.build_from_str(f''' + blockchain genesis b[1..33] + blockchain b31 a[32..34] + b30 < dummy + + nc1.nc_id = "{self.myblueprint_id.hex()}" + nc1.nc_method = initialize("00") + + # nc2 will fail because nc1.counter is 0 + nc2.nc_id = nc1 + nc2.nc_method = fail_on_zero() + + # nc3 will be voided because nc2 failed execution + nc3.nc_id = nc1 + nc3.nc_method = nop(1) + nc2.out[0] <<< nc3 + + nc1 <-- b31 + nc2 <-- b32 + nc3 <-- b33 + + # a34 will generate a reorg, reexecuting nc2. + # this time it succeeds because nc4 in the new chain increments nc1.counter to 1, before nc2. + # nc2 and nc3 are in the same block. 
+ + nc4.nc_id = nc1 + nc4.nc_method = nop(1) + nc4 < nc2 + nc4 <-- a32 + + b33 < a32 + nc2 <-- nc3 <-- a33 + ''') + + b31, b32, b33 = artifacts.get_typed_vertices(['b31', 'b32', 'b33'], Block) + a32, a33, a34 = artifacts.get_typed_vertices(['a32', 'a33', 'a34'], Block) + nc1, nc2, nc3, nc4 = artifacts.get_typed_vertices(['nc1', 'nc2', 'nc3', 'nc4'], Transaction) + + assert nc1.is_nano_contract() + assert nc2.is_nano_contract() + assert nc3.is_nano_contract() + assert nc4.is_nano_contract() + + found_b33 = False + for node, vertex in artifacts.list: + assert self.manager.on_new_tx(vertex) + + if node.name == 'b33': + found_b33 = True + assert b33.get_metadata().voided_by is None + assert nc1.get_metadata().voided_by is None + assert nc2.get_metadata().voided_by == {nc2.hash, self._settings.NC_EXECUTION_FAIL_ID} + assert nc3.get_metadata().voided_by == {nc2.hash} + assert nc4.get_metadata().voided_by is None + + assert nc1.get_metadata().first_block == b31.hash + assert nc2.get_metadata().first_block == b32.hash + assert nc3.get_metadata().first_block == b33.hash + assert nc4.get_metadata().first_block is None + + assert self.manager.get_nc_storage(b33, nc1.hash).get_obj(b'counter', INT_NC_TYPE) == 0 + + assert found_b33 + assert b33.get_metadata().voided_by == {b33.hash} + assert a34.get_metadata().voided_by is None + assert nc1.get_metadata().voided_by is None + assert nc2.get_metadata().voided_by is None + assert nc3.get_metadata().voided_by is None + assert nc4.get_metadata().voided_by is None + + assert nc1.get_metadata().first_block == b31.hash + assert nc2.get_metadata().first_block == a33.hash + assert nc3.get_metadata().first_block == a33.hash + assert nc4.get_metadata().first_block == a32.hash + + # increments by nc4 and nc3 + assert self.manager.get_nc_storage(a33, nc1.hash).get_obj(b'counter', INT_NC_TYPE) == 2 + + def test_back_to_mempool(self) -> None: + dag_builder = TestDAGBuilder.from_manager(self.manager) + artifacts = 
dag_builder.build_from_str(f''' + blockchain genesis b[1..32] + blockchain b31 a[32..34] + b30 < dummy + + a34.weight = 40 + + nc1.nc_id = "{self.myblueprint_id.hex()}" + nc1.nc_method = initialize("00") + + nc1 <-- b32 + + # a34 will generate a reorg, moving nc1 back to mempool + b32 < a32 + ''') + + artifacts.propagate_with(self.manager) + + b32, a34 = artifacts.get_typed_vertices(['b32', 'a34'], Block) + nc1 = artifacts.get_typed_vertex('nc1', Transaction) + + assert b32.get_metadata().voided_by == {b32.hash} + assert a34.get_metadata().voided_by is None + + assert nc1.is_nano_contract() + nc1_meta = nc1.get_metadata() + + assert nc1_meta.first_block is None + assert nc1_meta.voided_by is None + assert nc1_meta.nc_execution is NCExecutionState.PENDING + assert nc1_meta.nc_calls is None + + def test_nc_consensus_voided_tx_propagation_to_blocks(self) -> None: + dag_builder = TestDAGBuilder.from_manager(self.manager) + artifacts = dag_builder.build_from_str(f''' + blockchain genesis b[1..50] + b30 < dummy + + tx1.nc_id = "{self.myblueprint_id.hex()}" + tx1.nc_method = initialize("00") + + tx2.nc_id = tx1 + tx2.nc_method = nop(1) + + # tx3 will fail because it does not have a deposit + tx3.nc_id = tx1 + tx3.nc_method = deposit() + + # tx4 will be voided because tx3 is voided + tx4.nc_id = tx1 + tx4.nc_method = nop(1) + tx2.out[0] <<< tx4 + tx3.out[0] <<< tx4 + + # As tx4 failed, tx5 is trying to spend the unspent output of tx2. 
+ tx5.nc_id = tx1 + tx5.nc_method = nop(1) + tx2.out[0] <<< tx5 + + b31 --> tx1 + b32 --> tx2 + b33 --> tx3 + b34 --> tx4 + + b50 < tx5 + ''') + + artifacts.propagate_with(self.manager) + + tx1, tx2, tx3, tx4, tx5 = artifacts.get_typed_vertices(['tx1', 'tx2', 'tx3', 'tx4', 'tx5'], Transaction) + + assert tx1.get_metadata().voided_by is None + assert tx2.get_metadata().voided_by is None + assert tx3.get_metadata().voided_by == {tx3.hash, self._settings.NC_EXECUTION_FAIL_ID} + assert tx4.get_metadata().voided_by == {tx3.hash, tx4.hash} + assert tx5.get_metadata().voided_by is None + + assert tx1.get_metadata().nc_execution is NCExecutionState.SUCCESS + assert tx2.get_metadata().nc_execution is NCExecutionState.SUCCESS + assert tx3.get_metadata().nc_execution is NCExecutionState.FAILURE + assert tx4.get_metadata().nc_execution is NCExecutionState.SKIPPED + assert tx5.get_metadata().nc_execution is None + + b33, b34, b50 = artifacts.get_typed_vertices(['b33', 'b34', 'b50'], Block) + + self.assertIsNone(b33.get_metadata().voided_by) + self.assertIsNone(b34.get_metadata().voided_by) + self.assertIsNone(b50.get_metadata().voided_by) diff --git a/tests/nanocontracts/test_context.py b/tests/nanocontracts/test_context.py new file mode 100644 index 000000000..2251c40ff --- /dev/null +++ b/tests/nanocontracts/test_context.py @@ -0,0 +1,85 @@ +import copy + +from hathor.nanocontracts import Blueprint, public +from hathor.nanocontracts.catalog import NCBlueprintCatalog +from hathor.nanocontracts.context import Context +from hathor.nanocontracts.vertex_data import NanoHeaderData, VertexData +from hathor.transaction import Block, Transaction +from hathor.transaction.base_transaction import TxVersion +from tests.dag_builder.builder import TestDAGBuilder +from tests.nanocontracts.blueprints.unittest import BlueprintTestCase + +GLOBAL_VERTEX_DATA: VertexData | None = None + + +class RememberVertexDataBlueprint(Blueprint): + @public + def initialize(self, ctx: Context) -> None: + pass 
+ + @public + def remember_context(self, ctx: Context) -> None: + global GLOBAL_VERTEX_DATA + GLOBAL_VERTEX_DATA = copy.deepcopy(ctx.vertex) + + +class ContextTestCase(BlueprintTestCase): + def setUp(self) -> None: + global GLOBAL_VERTEX_DATA + + super().setUp() + + self.blueprint_id = self.gen_random_contract_id() + self.manager.tx_storage.nc_catalog = NCBlueprintCatalog({ + self.blueprint_id: RememberVertexDataBlueprint, + }) + self.address = self.gen_random_address() + + # clear vertex-data before and after + GLOBAL_VERTEX_DATA = None + + def tearDown(self) -> None: + global GLOBAL_VERTEX_DATA + + super().tearDown() + # clear vertex-data before and after + GLOBAL_VERTEX_DATA = None + + def test_vertex_data(self) -> None: + global GLOBAL_VERTEX_DATA + + dag_builder = TestDAGBuilder.from_manager(self.manager) + artifacts = dag_builder.build_from_str(f''' + blockchain genesis b[1..12] + b10 < dummy + nc1.nc_id = "{self.blueprint_id.hex()}" + nc1.nc_method = initialize() + nc1 <-- b11 + nc2.nc_id = nc1 + nc2.nc_method = remember_context() + nc1 <-- nc2 <-- b12 + ''') + artifacts.propagate_with(self.manager) + b12, = artifacts.get_typed_vertices(['b12'], Block) + nc1, nc2 = artifacts.get_typed_vertices(['nc1', 'nc2'], Transaction) + + # this is the vertex data that was observed by nc2 when remember_context was called + assert GLOBAL_VERTEX_DATA is not None + vertex_data = copy.deepcopy(GLOBAL_VERTEX_DATA) + + # XXX: nonce varies, even for a weight of 1.0 + # XXX: inputs/outputs/parents ignored since the dag builder will pick whatever to fill it in + + self.assertEqual(vertex_data.version, TxVersion.REGULAR_TRANSACTION) + self.assertEqual(vertex_data.hash, nc2.hash) + self.assertEqual(vertex_data.signal_bits, 0) + self.assertEqual(vertex_data.weight, 1.0) + self.assertEqual(vertex_data.tokens, ()) + self.assertEqual(vertex_data.block.hash, b12.hash) + self.assertEqual(vertex_data.block.timestamp, b12.timestamp) + self.assertEqual(vertex_data.block.height, 
b12.get_height()) + nano_header_data, = vertex_data.headers + assert isinstance(nano_header_data, NanoHeaderData) + self.assertEqual(nano_header_data.nc_id, nc1.hash) + self.assertEqual(nano_header_data.nc_method, 'remember_context') + self.assertEqual(nano_header_data.nc_args_bytes, b'\x00') diff --git a/tests/nanocontracts/test_contract_create_contract.py b/tests/nanocontracts/test_contract_create_contract.py new file mode 100644 index 000000000..2f2fe9316 --- /dev/null +++ b/tests/nanocontracts/test_contract_create_contract.py @@ -0,0 +1,346 @@ +from typing import Optional + +from hathor.conf.settings import HATHOR_TOKEN_UID +from hathor.nanocontracts import Blueprint, Context, public +from hathor.nanocontracts.nc_types import NCType, make_nc_type_for_type +from hathor.nanocontracts.storage.contract_storage import Balance +from hathor.nanocontracts.types import ( + BlueprintId, + ContractId, + NCAction, + NCActionType, + NCDepositAction, + NCGrantAuthorityAction, + NCWithdrawalAction, + TokenUid, + VertexId, +) +from hathor.nanocontracts.utils import derive_child_contract_id +from hathor.transaction import Transaction, TxInput, TxOutput +from hathor.transaction.headers.nano_header import NanoHeaderAction +from hathor.transaction.token_creation_tx import TokenCreationTransaction +from tests.dag_builder.builder import TestDAGBuilder +from tests.nanocontracts.blueprints.unittest import BlueprintTestCase + +INT_NC_TYPE = make_nc_type_for_type(int) +CONTRACT_NC_TYPE: NCType[ContractId | None] = make_nc_type_for_type(ContractId | None) # type: ignore[arg-type] + + +class MyBlueprint1(Blueprint): + counter: int + contract: Optional[ContractId] + token_uid: Optional[TokenUid] + + @public(allow_deposit=True, allow_grant_authority=True) + def initialize(self, ctx: Context, blueprint_id: BlueprintId, initial: int, token_uid: Optional[TokenUid]) -> None: + self.token_uid = token_uid + if initial > 0: + token_uid = TokenUid(HATHOR_TOKEN_UID) + action = 
ctx.get_single_action(token_uid) + salt = b'x' + assert isinstance(action, NCDepositAction) + new_actions: list[NCAction] = [NCDepositAction(token_uid=token_uid, amount=action.amount - initial)] + self.contract, _ = self.syscall.create_contract( + blueprint_id, salt, new_actions, blueprint_id, initial - 1, self.token_uid + ) + else: + self.contract = None + self.counter = initial + + @public + def create_children(self, ctx: Context, blueprint_id: BlueprintId, salt: bytes) -> None: + new_actions: list[NCAction] = [] + if self.token_uid and self.syscall.can_mint(self.token_uid): + new_actions.append(NCGrantAuthorityAction(token_uid=self.token_uid, mint=True, melt=True)) + self.syscall.create_contract(blueprint_id, salt + b'1', new_actions, blueprint_id, 0, self.token_uid) + self.syscall.create_contract(blueprint_id, salt + b'2', new_actions, blueprint_id, 0, self.token_uid) + self.syscall.create_contract(blueprint_id, salt + b'3', new_actions, blueprint_id, 0, self.token_uid) + + @public + def nop(self, ctx: Context) -> None: + pass + + @public(allow_deposit=True) + def mint(self, ctx: Context, amount: int) -> None: + assert self.token_uid is not None + self.syscall.mint_tokens(self.token_uid, amount) + + @public(allow_withdrawal=True) + def withdraw(self, ctx: Context) -> None: + pass + + +class MyBlueprint2(Blueprint): + counter: int + token_uid: Optional[TokenUid] + + @public(allow_grant_authority=True) + def initialize(self, ctx: Context, blueprint_id: BlueprintId, initial: int, token_uid: Optional[TokenUid]) -> None: + self.counter = initial + self.token_uid = token_uid + + @public + def melt(self, ctx: Context, amount: int, contract_id: ContractId) -> None: + assert self.token_uid is not None + action = NCWithdrawalAction(token_uid=self.token_uid, amount=amount) + self.syscall.call_public_method(contract_id, 'withdraw', [action]) + self.syscall.melt_tokens(self.token_uid, amount) + + +class NCBlueprintTestCase(BlueprintTestCase): + def setUp(self): + 
super().setUp() + self.blueprint1_id = self.gen_random_blueprint_id() + self.blueprint2_id = self.gen_random_blueprint_id() + self.register_blueprint_class(self.blueprint1_id, MyBlueprint1) + self.register_blueprint_class(self.blueprint2_id, MyBlueprint2) + + def test_basic(self) -> None: + counter = 5 + nc1_id = ContractId(VertexId(b'1' * 32)) + + token_uid = TokenUid(HATHOR_TOKEN_UID) + deposit = 100 + actions: list[NCAction] = [NCDepositAction(token_uid=token_uid, amount=deposit)] + address = self.gen_random_address() + ctx = Context(actions, self.get_genesis_tx(), address, timestamp=0) + self.runner.create_contract(nc1_id, self.blueprint1_id, ctx, self.blueprint1_id, counter, None) + + nc_id = nc1_id + expected = counter + remainder = deposit + while True: + nc_storage = self.runner.get_storage(nc_id) + counter = nc_storage.get_obj(b'counter', INT_NC_TYPE) + assert counter == expected + new_nc_id = nc_storage.get_obj(b'contract', CONTRACT_NC_TYPE) + balance = nc_storage.get_balance(token_uid) + if new_nc_id is not None: + expected_nc_id = derive_child_contract_id(nc_id, b'x', self.blueprint1_id) + assert new_nc_id == expected_nc_id + assert balance == Balance(value=expected, can_mint=False, can_melt=False) + remainder -= balance.value + else: + assert balance.value == remainder + break + nc_id = new_nc_id + expected -= 1 + + actions = [] + ctx = Context(actions, self.get_genesis_tx(), address, timestamp=0) + salt = b'123' + self.runner.call_public_method(nc1_id, 'create_children', ctx, self.blueprint1_id, salt) + child1_id = derive_child_contract_id(nc1_id, salt + b'1', self.blueprint1_id) + child2_id = derive_child_contract_id(nc1_id, salt + b'2', self.blueprint1_id) + child3_id = derive_child_contract_id(nc1_id, salt + b'3', self.blueprint1_id) + child4_id = derive_child_contract_id(nc1_id, salt + b'4', self.blueprint1_id) + + assert self.runner.has_contract_been_initialized(child1_id) + assert self.runner.has_contract_been_initialized(child2_id) + assert 
self.runner.has_contract_been_initialized(child3_id) + assert not self.runner.has_contract_been_initialized(child4_id) + + salt = b'456' + self.runner.call_public_method(child1_id, 'create_children', ctx, self.blueprint1_id, salt) + child1_child1_id = derive_child_contract_id(child1_id, salt + b'1', self.blueprint1_id) + assert self.runner.has_contract_been_initialized(child1_child1_id) + + def test_dag_basic(self) -> None: + salt1 = b'x' + salt11 = salt1 + b'1' + salt2 = b'1' + salt21 = salt2 + b'1' + + dag_builder = TestDAGBuilder.from_manager(self.manager) + artifacts = dag_builder.build_from_str(f''' + blockchain genesis b[1..34] + blockchain b30 c[31..50] + b34 < c31 + b30 < dummy + + c31.weight = 6 + + nc1.nc_id = "{self.blueprint1_id.hex()}" + nc1.nc_method = initialize("{self.blueprint1_id.hex()}", 1, `TKA`) + nc1.nc_deposit = 10 HTR + nc1.out[0] = 200 TKA + + nc2.nc_id = nc1 + nc2.nc_method = create_children("{self.blueprint2_id.hex()}", "{salt1.hex()}") + + nc3.nc_id = child_contract(nc1, "{salt1.hex()}", "{self.blueprint1_id.hex()}") + nc3.nc_method = create_children("{self.blueprint1_id.hex()}", "{salt2.hex()}") + + nc4.nc_id = nc1 + nc4.nc_method = mint(456) + nc4.nc_deposit = 5 HTR + + nc5.nc_id = child_contract(nc3.nc_id, "{salt21.hex()}", "{self.blueprint1_id.hex()}") + nc5.nc_method = nop() + + nc6.nc_id = child_contract(nc2.nc_id, "{salt11.hex()}", "{self.blueprint2_id.hex()}") + nc6.nc_method = melt(123, `nc1`) + + nc1 <-- b31 + b31 < nc2 + nc2 <-- b32 + b32 < nc3 + nc3 <-- nc4 <-- b33 + b33 < nc5 + nc5 <-- nc6 <-- b34 + ''') + + nc1, nc2, nc3, nc4, nc5, nc6 = artifacts.get_typed_vertices( + ['nc1', 'nc2', 'nc3', 'nc4', 'nc5', 'nc6'], + Transaction, + ) + tka = artifacts.get_typed_vertex('TKA', TokenCreationTransaction) + + # TODO: The DAGBuilder currently doesn't support authority inputs/outputs, + # and neither authority actions, so we have to set them manually. Improve this. 
+ nc1.inputs.append(TxInput(tx_id=tka.hash, index=len(tka.outputs) - 1, data=b'')) # melt authority + nc1.inputs.append(TxInput(tx_id=tka.hash, index=len(tka.outputs) - 2, data=b'')) # mint authority + dag_builder._exporter.sign_all_inputs(nc1) + nc1_header = nc1.get_nano_header() + assert len(nc1_header.nc_actions) == 1 + grant_action = NanoHeaderAction( + type=NCActionType.GRANT_AUTHORITY, + token_index=1, + amount=TxOutput.ALL_AUTHORITIES, + ) + nc1_header.nc_actions.append(grant_action) + # XXX: Dirty hack, by purposefully not clearing the cache, we don't have to re-sign the nano header. + # nc1.clear_sighash_cache() + + artifacts.propagate_with(self.manager, up_to='b34') + + assert nc1.get_metadata().voided_by is None + assert nc2.get_metadata().voided_by is None + assert nc3.get_metadata().voided_by is None + assert nc4.get_metadata().voided_by is None + assert nc5.get_metadata().voided_by is None + assert nc6.get_metadata().voided_by is None + + nc1_contract_id = ContractId(VertexId(nc1.hash)) + + contracts = [] + # nc1 + contracts.append(nc1.hash) + contracts.append(derive_child_contract_id(nc1_contract_id, salt1, self.blueprint1_id)) + # nc2 + contracts.append(derive_child_contract_id(nc1_contract_id, salt1 + b'1', self.blueprint2_id)) + contracts.append(derive_child_contract_id(nc1_contract_id, salt1 + b'2', self.blueprint2_id)) + contracts.append(derive_child_contract_id(nc1_contract_id, salt1 + b'3', self.blueprint2_id)) + # nc3 + nc1_child1_contract_id = ContractId(VertexId(contracts[1])) + contracts.append(derive_child_contract_id(nc1_child1_contract_id, salt2 + b'1', self.blueprint1_id)) + contracts.append(derive_child_contract_id(nc1_child1_contract_id, salt2 + b'2', self.blueprint1_id)) + contracts.append(derive_child_contract_id(nc1_child1_contract_id, salt2 + b'3', self.blueprint1_id)) + # nc4, nc5, nc6 + # (empty) + + # Confirm that contract ids are different. 
+ assert len(set(contracts)) == len(contracts) + + runner = self.manager.get_best_block_nc_runner() + for idx, nc_id in enumerate(contracts): + assert runner.has_contract_been_initialized(nc_id), f'index={idx}' + + indexes = self.manager.tx_storage.indexes + + # blueprint_history: blueprint1 + result = set(indexes.blueprint_history.get_newest(self.blueprint1_id)) + expected = {nc1.hash, nc3.hash} + assert result == expected + + # blueprint_history: blueprint2 + result = set(indexes.blueprint_history.get_newest(self.blueprint2_id)) + expected = {nc2.hash} + assert result == expected + + # nc_creation + result = set(indexes.nc_creation.get_newest()) + expected = {nc1.hash, nc2.hash, nc3.hash} + assert result == expected + + # tokens + htr_total = indexes.tokens.get_token_info(HATHOR_TOKEN_UID).get_total() + tka_total = indexes.tokens.get_token_info(tka.hash).get_total() + assert self.manager.tx_storage.get_height_best_block() == 34 + # genesis + # +34 blocks + # -2 from the TKA mint in nc1.out[0] + # -5 from the mint in nc4.nc_method + # +1 from the melt in nc6.nc_method + assert htr_total == self._settings.GENESIS_TOKENS + 34 * self._settings.INITIAL_TOKENS_PER_BLOCK - 2 - 5 + 1 + # 200 from nc1.out[0] + # +456 from nc4.nc_method + # -123 from nc6.nc_method + assert tka_total == 200 + 456 - 123 + + # nc_history + expected_list = [ + {nc1.hash, nc2.hash, nc4.hash, nc6.hash}, + {nc1.hash, nc3.hash}, + {nc2.hash, nc6.hash}, + {nc2.hash}, + {nc2.hash}, + {nc3.hash, nc5.hash}, + {nc3.hash}, + {nc3.hash}, + ] + assert len(contracts) == len(expected_list) + match_list = [] + for nc_id, expected in zip(contracts, expected_list): + result = set(indexes.nc_history.get_newest(nc_id)) + match_list.append(result == expected) + assert all(match_list) + + # Reorg! 
+ artifacts.propagate_with(self.manager) + + runner = self.manager.get_best_block_nc_runner() + for nc_id in contracts: + assert not runner.has_contract_been_initialized(nc_id) + + # blueprint_history: blueprint1 + result = set(indexes.blueprint_history.get_newest(self.blueprint1_id)) + assert result == {nc1.hash} + + # blueprint_history: blueprint2 + result = set(indexes.blueprint_history.get_newest(self.blueprint2_id)) + assert result == set() + + # nc_creation + result = set(indexes.nc_creation.get_newest()) + assert result == {nc1.hash} + + # tokens + htr_total = indexes.tokens.get_token_info(HATHOR_TOKEN_UID).get_total() + tka_total = indexes.tokens.get_token_info(tka.hash).get_total() + assert self.manager.tx_storage.get_height_best_block() == 50 + # TODO: Is there a bug in the token index? It should be 50, not 54 blocks + # genesis + 50 blocks - 2 from the TKA mint in nc1.out[0] + assert htr_total == self._settings.GENESIS_TOKENS + 54 * self._settings.INITIAL_TOKENS_PER_BLOCK - 2 + # 200 from nc1.out[0] + assert tka_total == 200 + + # nc_history + expected_list = [ + {nc1.hash, nc2.hash, nc4.hash}, + {nc3.hash}, + {nc6.hash}, + set(), + set(), + {nc5.hash}, + set(), + set(), + ] + assert len(contracts) == len(expected_list) + match_list = [] + for nc_id, expected in zip(contracts, expected_list): + result = set(indexes.nc_history.get_newest(nc_id)) + match_list.append(result == expected) + assert all(match_list) + + # TODO Clean-up mempool after reorg? 
diff --git a/tests/unittest.py b/tests/unittest.py index 87e537e87..71cc977a7 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -30,9 +30,10 @@ from hathor.transaction import BaseTransaction, Block, Transaction from hathor.transaction.storage.transaction_storage import TransactionStorage from hathor.types import VertexId -from hathor.util import Random, not_none -from hathor.wallet import BaseWallet, HDWallet, Wallet +from hathor.util import Random, initialize_hd_wallet, not_none +from hathor.wallet import BaseWallet, Wallet from tests.test_memory_reactor_clock import TestMemoryReactorClock +from tests.utils import DEFAULT_WORDS logger = get_logger() main = ut_main @@ -508,18 +509,10 @@ def clean_pending(self, required_to_quiesce: bool = True) -> None: if required_to_quiesce and active: self.fail('Reactor was still active when it was required to be quiescent.') - def get_wallet(self) -> HDWallet: - words = ('bind daring above film health blush during tiny neck slight clown salmon ' - 'wine brown good setup later omit jaguar tourist rescue flip pet salute') - - hd = HDWallet(words=words) - hd._manually_initialize() - return hd - def get_address(self, index: int) -> Optional[str]: """ Generate a fixed HD Wallet and return an address """ - hd = self.get_wallet() + hd = initialize_hd_wallet(DEFAULT_WORDS) if index >= hd.gap_limit: return None