Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
24 changes: 12 additions & 12 deletions hathor/manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,7 @@
from hathor.transaction.exceptions import TxValidationError
from hathor.transaction.storage import TransactionStorage
from hathor.transaction.storage.exceptions import TransactionDoesNotExist
from hathor.types import Address, VertexId
from hathor.util import EnvironmentInfo, LogDuration, Random, Reactor, calculate_min_significant_weight, not_none
from hathor.wallet import BaseWallet

Expand Down Expand Up @@ -701,11 +702,10 @@ def add_listen_address(self, addr: str) -> None:
def add_peer_discovery(self, peer_discovery: PeerDiscovery) -> None:
self.peer_discoveries.append(peer_discovery)

def get_new_tx_parents(self, timestamp: Optional[float] = None) -> List[bytes]:
def get_new_tx_parents(self, timestamp: Optional[float] = None) -> List[VertexId]:
"""Select which transactions will be confirmed by a new transaction.

:return: The hashes of the parents for a new transaction.
:rtype: List[bytes(hash)]
"""
timestamp = timestamp or self.reactor.seconds()
parent_txs = self.generate_parent_txs(timestamp)
Expand All @@ -722,7 +722,7 @@ def generate_parent_txs(self, timestamp: Optional[float]) -> 'ParentTxs':
can_include_intervals = sorted(self.tx_storage.get_tx_tips(timestamp - 1))
assert can_include_intervals, 'tips cannot be empty'
max_timestamp = max(int(i.begin) for i in can_include_intervals)
must_include: List[bytes] = []
must_include: List[VertexId] = []
assert len(can_include_intervals) > 0, f'invalid timestamp "{timestamp}", no tips found"'
if len(can_include_intervals) < 2:
# If there is only one tip, let's randomly choose one of its parents.
Expand All @@ -745,7 +745,7 @@ def can_start_mining(self) -> bool:
return True
return self.connections.has_synced_peer()

def get_block_templates(self, parent_block_hash: Optional[bytes] = None,
def get_block_templates(self, parent_block_hash: Optional[VertexId] = None,
timestamp: Optional[int] = None) -> BlockTemplates:
""" Cached version of `make_block_templates`, cache is invalidated when latest_timestamp changes."""
if parent_block_hash is not None:
Expand All @@ -770,7 +770,7 @@ def make_block_templates(self, timestamp: Optional[int] = None) -> Iterator[Bloc
for parent_block_hash in self.tx_storage.get_best_block_tips():
yield self.make_block_template(parent_block_hash, timestamp)

def make_block_template(self, parent_block_hash: bytes, timestamp: Optional[int] = None) -> BlockTemplate:
def make_block_template(self, parent_block_hash: VertexId, timestamp: Optional[int] = None) -> BlockTemplate:
""" Makes a block template using the given parent block.
"""
parent_block = self.tx_storage.get_transaction(parent_block_hash)
Expand All @@ -782,7 +782,7 @@ def make_block_template(self, parent_block_hash: bytes, timestamp: Optional[int]
current_timestamp = timestamp
return self._make_block_template(parent_block, parent_txs, current_timestamp)

def make_custom_block_template(self, parent_block_hash: bytes, parent_tx_hashes: List[bytes],
def make_custom_block_template(self, parent_block_hash: VertexId, parent_tx_hashes: List[VertexId],
timestamp: Optional[int] = None) -> BlockTemplate:
""" Makes a block template using the given parent block and txs.
"""
Expand Down Expand Up @@ -861,8 +861,8 @@ def _make_block_template(self, parent_block: Block, parent_txs: 'ParentTxs', cur
)

def generate_mining_block(self, timestamp: Optional[int] = None,
parent_block_hash: Optional[bytes] = None,
data: bytes = b'', address: Optional[bytes] = None,
parent_block_hash: Optional[VertexId] = None,
data: bytes = b'', address: Optional[Address] = None,
merge_mined: bool = False) -> Union[Block, MergeMinedBlock]:
""" Generates a block ready to be mined. The block includes new issued tokens,
parents, and the weight.
Expand Down Expand Up @@ -1217,10 +1217,10 @@ class ParentTxs(NamedTuple):
included.
"""
max_timestamp: int
can_include: List[bytes]
must_include: List[bytes]
can_include: List[VertexId]
must_include: List[VertexId]

def get_random_parents(self, rng: Random) -> Tuple[bytes, bytes]:
def get_random_parents(self, rng: Random) -> Tuple[VertexId, VertexId]:
""" Get parents from self.parents plus a random choice from self.parents_any to make it 3 in total.

Using tuple as return type to make it explicit that the length is always 2.
Expand All @@ -1230,6 +1230,6 @@ def get_random_parents(self, rng: Random) -> Tuple[bytes, bytes]:
p1, p2 = self.must_include[:] + fill
return p1, p2

def get_all_tips(self) -> List[VertexId]:
    """Return every generated "tip": the required parents followed by the optional ones."""
    tips = list(self.must_include)
    tips.extend(self.can_include)
    return tips
23 changes: 12 additions & 11 deletions hathor/transaction/base_transaction.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,7 @@
from hathor.transaction.transaction_metadata import TransactionMetadata
from hathor.transaction.util import VerboseCallback, int_to_bytes, unpack, unpack_len
from hathor.transaction.validation_state import ValidationState
from hathor.types import TokenUid, TxOutputScript, VertexId
from hathor.util import classproperty

if TYPE_CHECKING:
Expand Down Expand Up @@ -168,8 +169,8 @@ def __init__(self,
weight: float = 0,
inputs: Optional[List['TxInput']] = None,
outputs: Optional[List['TxOutput']] = None,
parents: Optional[List[bytes]] = None,
hash: Optional[bytes] = None,
parents: Optional[List[VertexId]] = None,
hash: Optional[VertexId] = None,
storage: Optional['TransactionStorage'] = None) -> None:
"""
Nonce: nonce used for the proof-of-work
Expand Down Expand Up @@ -812,7 +813,7 @@ def update_hash(self) -> None:
self.hash = self.calculate_hash()

def start_mining(self, start: int = 0, end: int = MAX_NONCE, sleep_seconds: float = 0.0, update_time: bool = True,
*, should_stop: Callable[[], bool] = lambda: False) -> Optional[bytes]:
*, should_stop: Callable[[], bool] = lambda: False) -> Optional[VertexId]:
"""Starts mining until it solves the problem, i.e., finds the nonce that satisfies the conditions

:param start: beginning of the search interval
Expand Down Expand Up @@ -1102,7 +1103,7 @@ def clone(self) -> 'BaseTransaction':
return new_tx

@abstractmethod
def get_token_uid(self, index: int) -> bytes:
def get_token_uid(self, index: int) -> TokenUid:
raise NotImplementedError

def is_ready_for_validation(self) -> bool:
Expand All @@ -1120,19 +1121,19 @@ def is_ready_for_validation(self) -> bool:
class TxInput:
_tx: BaseTransaction # XXX: used for caching on hathor.transaction.Transaction.get_spent_tx

def __init__(self, tx_id: bytes, index: int, data: bytes) -> None:
def __init__(self, tx_id: VertexId, index: int, data: bytes) -> None:
"""
tx_id: hash of the transaction that contains the output of this input
index: index of the output you are spending from transaction tx_id (1 byte)
data: data to solve output script
"""
assert isinstance(tx_id, bytes), 'Value is %s, type %s' % (str(tx_id), type(tx_id))
assert isinstance(tx_id, VertexId), 'Value is %s, type %s' % (str(tx_id), type(tx_id))
assert isinstance(index, int), 'Value is %s, type %s' % (str(index), type(index))
assert isinstance(data, bytes), 'Value is %s, type %s' % (str(data), type(data))

self.tx_id = tx_id # bytes
self.index = index # int
self.data = data # bytes
self.tx_id = tx_id
self.index = index
self.data = data

def __repr__(self) -> str:
return str(self)
Expand Down Expand Up @@ -1216,14 +1217,14 @@ class TxOutput:

ALL_AUTHORITIES = TOKEN_MINT_MASK | TOKEN_MELT_MASK

def __init__(self, value: int, script: bytes, token_data: int = 0) -> None:
def __init__(self, value: int, script: TxOutputScript, token_data: int = 0) -> None:
"""
value: amount spent (4 bytes)
script: script in bytes
token_data: index of the token uid in the uid list
"""
assert isinstance(value, int), 'value is %s, type %s' % (str(value), type(value))
assert isinstance(script, bytes), 'script is %s, type %s' % (str(script), type(script))
assert isinstance(script, TxOutputScript), 'script is %s, type %s' % (str(script), type(script))
assert isinstance(token_data, int), 'token_data is %s, type %s' % (str(token_data), type(token_data))
if value <= 0 or value > MAX_OUTPUT_VALUE:
raise InvalidOutputValue
Expand Down
22 changes: 11 additions & 11 deletions hathor/transaction/transaction.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,7 @@
WeightError,
)
from hathor.transaction.util import VerboseCallback, get_deposit_amount, get_withdraw_amount, unpack, unpack_len
from hathor.types import TokenUid, VertexId

if TYPE_CHECKING:
from hathor.transaction.storage import TransactionStorage # noqa: F401
Expand All @@ -63,7 +64,7 @@ class TokenInfo(NamedTuple):


class RewardLockedInfo(NamedTuple):
block_hash: bytes
block_hash: VertexId
blocks_needed: int


Expand All @@ -79,9 +80,9 @@ def __init__(self,
weight: float = 0,
inputs: Optional[List[TxInput]] = None,
outputs: Optional[List[TxOutput]] = None,
parents: Optional[List[bytes]] = None,
tokens: Optional[List[bytes]] = None,
hash: Optional[bytes] = None,
parents: Optional[List[VertexId]] = None,
tokens: Optional[List[TokenUid]] = None,
hash: Optional[VertexId] = None,
storage: Optional['TransactionStorage'] = None) -> None:
"""
Creating new init just to make sure inputs will always be empty array
Expand Down Expand Up @@ -264,7 +265,7 @@ def get_sighash_all_data(self) -> bytes:

return self._sighash_data_cache

def get_token_uid(self, index: int) -> bytes:
def get_token_uid(self, index: int) -> TokenUid:
"""Returns the token uid with corresponding index from the tx token uid list.

Hathor always has index 0, but we don't include it in the token uid list, so other tokens are
Expand All @@ -274,7 +275,6 @@ def get_token_uid(self, index: int) -> bytes:
:type index: int

:return: the token uid
:rtype: bytes
"""
if index == 0:
return settings.HATHOR_TOKEN_UID
Expand Down Expand Up @@ -413,10 +413,10 @@ def verify_outputs(self) -> None:
if output.get_token_index() > len(self.tokens):
raise InvalidToken('token uid index not available: index {}'.format(output.get_token_index()))

def get_token_info_from_inputs(self) -> Dict[bytes, TokenInfo]:
def get_token_info_from_inputs(self) -> Dict[TokenUid, TokenInfo]:
"""Sum up all tokens present in the inputs and their properties (amount, can_mint, can_melt)
"""
token_dict: Dict[bytes, TokenInfo] = {}
token_dict: Dict[TokenUid, TokenInfo] = {}

default_info: TokenInfo = TokenInfo(0, False, False)

Expand All @@ -440,7 +440,7 @@ def get_token_info_from_inputs(self) -> Dict[bytes, TokenInfo]:

return token_dict

def update_token_info_from_outputs(self, token_dict: Dict[bytes, TokenInfo]) -> None:
def update_token_info_from_outputs(self, token_dict: Dict[TokenUid, TokenInfo]) -> None:
"""Iterate over the outputs and add values to token info dict. Updates the dict in-place.

Also, checks if no token has authorities on the outputs not present on the inputs
Expand Down Expand Up @@ -471,7 +471,7 @@ def update_token_info_from_outputs(self, token_dict: Dict[bytes, TokenInfo]) ->
sum_tokens = token_info.amount + tx_output.value
token_dict[token_uid] = TokenInfo(sum_tokens, token_info.can_mint, token_info.can_melt)

def check_authorities_and_deposit(self, token_dict: Dict[bytes, TokenInfo]) -> None:
def check_authorities_and_deposit(self, token_dict: Dict[TokenUid, TokenInfo]) -> None:
"""Verify that the sum of outputs is equal of the sum of inputs, for each token. If sum of inputs
and outputs is not 0, make sure inputs have mint/melt authority.

Expand Down Expand Up @@ -538,7 +538,7 @@ def verify_inputs(self, *, skip_script: bool = False) -> None:
"""Verify inputs signatures and ownership and all inputs actually exist"""
from hathor.transaction.storage.exceptions import TransactionDoesNotExist

spent_outputs: Set[Tuple[bytes, int]] = set()
spent_outputs: Set[Tuple[VertexId, int]] = set()
for input_tx in self.inputs:
if len(input_tx.data) > settings.MAX_INPUT_DATA_SIZE:
raise InvalidInputDataSize('size: {} and max-size: {}'.format(
Expand Down
23 changes: 23 additions & 0 deletions hathor/types.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
# Copyright 2021 Hathor Labs
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# XXX There is a lot of refactor to be done before we can use `NewType`.
# So, let's skip using NewType until everything is refactored.

# Plain aliases for readability only: they add no runtime or static checking.
# Each trailing comment records the intended `NewType` form for the future refactor.
VertexId = bytes  # NewType('VertexId', bytes)
Address = bytes  # NewType('Address', bytes)
TxOutputScript = bytes  # NewType('TxOutputScript', bytes)
Timestamp = int  # NewType('Timestamp', int)
TokenUid = VertexId  # NewType('TokenUid', VertexId)
Amount = int  # NewType('Amount', int)
3 changes: 2 additions & 1 deletion hathor/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,7 @@

import hathor
from hathor.conf import HathorSettings
from hathor.types import TokenUid

if TYPE_CHECKING:
import structlog
Expand Down Expand Up @@ -735,7 +736,7 @@ def __exit__(self, exc_type, exc_val, exc_tb):
gc.enable()


def is_token_uid_valid(token_uid: bytes) -> bool:
def is_token_uid_valid(token_uid: TokenUid) -> bool:
""" Checks whether a byte sequence can be a valid token UID.

>>> is_token_uid_valid(bytes.fromhex('00'))
Expand Down