Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
83 changes: 37 additions & 46 deletions hathor/conf/settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,14 +14,15 @@

import os
from math import log
from typing import Any, Dict, List, NamedTuple, Optional, Union
from pathlib import Path
from typing import NamedTuple, Optional, Union

import pydantic

from hathor.checkpoint import Checkpoint
from hathor.feature_activation.settings import Settings as FeatureActivationSettings
from hathor.utils import yaml
from hathor.utils.pydantic import BaseModel
from hathor.utils.named_tuple import validated_named_tuple_from_dict

DECIMAL_PLACES = 2

Expand All @@ -40,7 +41,7 @@ class HathorSettings(NamedTuple):
NETWORK_NAME: str

# Initial bootstrap servers
BOOTSTRAP_DNS: List[str] = []
BOOTSTRAP_DNS: list[str] = []

# enable peer whitelist
ENABLE_PEER_WHITELIST: bool = False
Expand Down Expand Up @@ -243,7 +244,7 @@ def MAXIMUM_NUMBER_OF_HALVINGS(self) -> int:
TOKEN_DEPOSIT_PERCENTAGE: float = 0.01

# Array with the settings parameters that are used when calculating the settings hash
P2P_SETTINGS_HASH_FIELDS: List[str] = [
P2P_SETTINGS_HASH_FIELDS: list[str] = [
'P2PKH_VERSION_BYTE',
'MULTISIG_VERSION_BYTE',
'MIN_BLOCK_WEIGHT',
Expand Down Expand Up @@ -359,13 +360,13 @@ def MAXIMUM_NUMBER_OF_HALVINGS(self) -> int:
METRICS_COLLECT_ROCKSDB_DATA_INTERVAL: int = 86400 # 1 day

# Block checkpoints
CHECKPOINTS: List[Checkpoint] = []
CHECKPOINTS: list[Checkpoint] = []

# Used on testing to enable slow asserts that help catch bugs but we don't want to run in production
SLOW_ASSERTS: bool = False

# List of soft voided transaction.
SOFT_VOIDED_TX_IDS: List[bytes] = []
SOFT_VOIDED_TX_IDS: list[bytes] = []

# Identifier used in metadata's voided_by to mark a tx as soft-voided.
SOFT_VOIDED_ID: bytes = b'tx-non-grata'
Expand Down Expand Up @@ -394,30 +395,24 @@ def MAXIMUM_NUMBER_OF_HALVINGS(self) -> int:
@classmethod
def from_yaml(cls, *, filepath: str) -> 'HathorSettings':
    """Takes a filepath to a yaml file and returns a validated HathorSettings instance.

    Args:
        filepath: path to the yaml settings file. The file may use the 'extends'
            key to extend another yaml file; relative 'extends' paths resolve
            against this package's directory (via custom_root), so built-in
            network configs can be extended by name.

    Returns: a HathorSettings instance validated by the pydantic validators
        declared in _VALIDATORS.
    """
    settings_dict = yaml.dict_from_extended_yaml(filepath=filepath, custom_root=Path(__file__).parent)

    return validated_named_tuple_from_dict(
        HathorSettings,
        settings_dict,
        validators=_VALIDATORS
    )


def _parse_checkpoints(checkpoints: Union[dict[int, str], list[Checkpoint]]) -> list[Checkpoint]:
    """Parse raw checkpoint data into a list of checkpoints.

    Accepts either a mapping of block height to hex-encoded block hash (as read
    from yaml), or an already-parsed list of Checkpoint instances, which is
    returned unchanged.

    Raises:
        TypeError: if checkpoints is neither a dict nor a list.
    """
    if isinstance(checkpoints, dict):
        return [
            Checkpoint(height, bytes.fromhex(_hash))
            for height, _hash in checkpoints.items()
        ]

    if not isinstance(checkpoints, list):
        # Error message uses the builtin-generic spellings consistent with the signature.
        raise TypeError(f'expected \'dict[int, str]\' or \'list[Checkpoint]\', got {checkpoints}')

    return checkpoints
Expand All @@ -434,29 +429,25 @@ def _parse_hex_str(hex_str: Union[str, bytes]) -> bytes:
return hex_str


_ValidatedHathorSettings = pydantic.create_model_from_namedtuple(
HathorSettings,
__base__=BaseModel,
__validators__=dict(
_parse_hex_str=pydantic.validator(
'P2PKH_VERSION_BYTE',
'MULTISIG_VERSION_BYTE',
'GENESIS_OUTPUT_SCRIPT',
'GENESIS_BLOCK_HASH',
'GENESIS_TX1_HASH',
'GENESIS_TX2_HASH',
pre=True,
allow_reuse=True
)(_parse_hex_str),
_parse_soft_voided_tx_id=pydantic.validator(
'SOFT_VOIDED_TX_IDS',
pre=True,
allow_reuse=True,
each_item=True
)(_parse_hex_str),
_parse_checkpoints=pydantic.validator(
'CHECKPOINTS',
pre=True
)(_parse_checkpoints)
)
# Pydantic validators applied to HathorSettings fields when building a validated
# instance (see HathorSettings.from_yaml). `pre=True` runs them before the
# standard field validation, so raw yaml values (hex strings, height->hash
# dicts) are converted into their final types first.
_VALIDATORS = {
    '_parse_hex_str': pydantic.validator(
        'P2PKH_VERSION_BYTE',
        'MULTISIG_VERSION_BYTE',
        'GENESIS_OUTPUT_SCRIPT',
        'GENESIS_BLOCK_HASH',
        'GENESIS_TX1_HASH',
        'GENESIS_TX2_HASH',
        pre=True,
        allow_reuse=True
    )(_parse_hex_str),
    '_parse_soft_voided_tx_id': pydantic.validator(
        'SOFT_VOIDED_TX_IDS',
        pre=True,
        allow_reuse=True,
        each_item=True
    )(_parse_hex_str),
    '_parse_checkpoints': pydantic.validator(
        'CHECKPOINTS',
        pre=True
    )(_parse_checkpoints),
}
45 changes: 45 additions & 0 deletions hathor/utils/dict.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
# Copyright 2023 Hathor Labs
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from copy import deepcopy


def deep_merge(first_dict: dict, second_dict: dict) -> dict:
    """
    Recursively merges two dicts, returning a new one with the merged values. Keeps both input dicts intact.

    Values from second_dict win whenever both sides hold a non-dict value for
    the same key; nested dicts on both sides are merged key by key.

    Note: will raise RecursionError if there's a circular reference in both dicts.

    >>> dict1 = dict(a=1, b=dict(c=2, d=3), e=dict(f=4))
    >>> dict2 = dict(b=dict(d=5, e=6), e=7)
    >>> result = deep_merge(dict1, dict2)
    >>> result == dict(a=1, b=dict(c=2, d=5, e=6), e=7)
    True
    >>> dict1 == dict(a=1, b=dict(c=2, d=3), e=dict(f=4))
    True
    >>> dict2 == dict(b=dict(d=5, e=6), e=7)
    True
    """
    def merge_into(target: dict, source: dict) -> dict:
        # Mutates and returns `target`; only ever called on the deep copy below.
        for key, value in source.items():
            if isinstance(target.get(key), dict) and isinstance(value, dict):
                merge_into(target[key], value)
            else:
                target[key] = value
        return target

    # Deep-copy the base dict so neither input is mutated by the merge.
    return merge_into(deepcopy(first_dict), second_dict)
53 changes: 53 additions & 0 deletions hathor/utils/named_tuple.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
# Copyright 2023 Hathor Labs
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from typing import Any, NamedTuple, Optional, Type, TypeVar

import pydantic

from hathor.utils.pydantic import BaseModel

T = TypeVar('T', bound=NamedTuple)


def validated_named_tuple_from_dict(
    named_tuple_type: Type[T],
    attributes_dict: dict[str, Any],
    *,
    validators: Optional[dict[str, classmethod]] = None
) -> T:
    """
    Takes an attributes dict and returns a validated instance of the specified NamedTuple subclass.
    Performs validation using pydantic.

    Args:
        named_tuple_type: the NamedTuple subclass to create an instance from
        attributes_dict: a dict with all required attributes for the NamedTuple subclass
        validators: custom pydantic validators (read https://docs.pydantic.dev/latest/usage/validators)

    Returns: a validated instance of the specified NamedTuple subclass
    """
    model_class = pydantic.create_model_from_namedtuple(
        named_tuple_type,
        __base__=BaseModel,
        __validators__=validators
    )

    # pydantic.create_model_from_namedtuple doesn't pick up the NamedTuple's
    # default attribute values, so instantiate the NamedTuple first to fill
    # them in, then run the full attribute set through the pydantic model.
    with_defaults = named_tuple_type(**attributes_dict)
    validated = model_class(**with_defaults._asdict())

    # Iterating a pydantic model yields (field, value) pairs.
    return named_tuple_type(**dict(validated))
55 changes: 52 additions & 3 deletions hathor/utils/yaml.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,12 +12,61 @@
# See the License for the specific language governing permissions and
# limitations under the License.

from typing import Any, Dict
import os
from pathlib import Path
from typing import Any, Optional, Union

import yaml

from hathor.utils.dict import deep_merge

def dict_from(*, filepath: str) -> Dict[str, Any]:
_EXTENDS_KEY = 'extends'


def dict_from_yaml(*, filepath: Union[Path, str]) -> dict[str, Any]:
    """Takes a filepath to a yaml file and returns a dictionary with its contents.

    An empty yaml file yields an empty dict. Raises ValueError if the path is
    not a file or if the parsed top-level value is not a mapping.
    """
    if not os.path.isfile(filepath):
        raise ValueError(f"'{filepath}' is not a file")

    with open(filepath, 'r') as fp:
        parsed = yaml.safe_load(fp)

    # safe_load returns None for an empty document; normalize to an empty dict.
    if parsed is None:
        return {}

    if isinstance(parsed, dict):
        return parsed

    raise ValueError(f"'{filepath}' cannot be parsed as a dictionary")


def dict_from_extended_yaml(*, filepath: Union[Path, str], custom_root: Optional[Path] = None) -> dict[str, Any]:
    """
    Takes a filepath to a yaml file and returns a dictionary with its contents.

    Supports extending another yaml file via the 'extends' key in the file. The 'extends' value can be an absolute path
    to a yaml file, or a path relative to the base yaml file. The custom_root arg can be provided to set a custom root
    for relative paths, taking lower precedence.

    Note: the 'extends' key is reserved and will not be present in the returned dictionary.
    To opt-out of the extension feature, use dict_from_yaml().
    """
    contents = dict_from_yaml(filepath=filepath)
    extends_target = contents.pop(_EXTENDS_KEY, None)

    # No (or empty) 'extends' key: nothing to merge.
    if not extends_target:
        return contents

    # Resolve relative to the extending file first; fall back to custom_root.
    base_path = Path(filepath).parent / str(extends_target)
    if custom_root and not os.path.isfile(base_path):
        base_path = custom_root / str(extends_target)

    try:
        # Recurse so chains of extensions are resolved; a cycle blows the stack.
        base_dict = dict_from_extended_yaml(filepath=base_path, custom_root=custom_root)
    except RecursionError as e:
        raise ValueError('Cannot parse yaml with recursive extensions.') from e

    # The extending file's values take precedence over the extended file's.
    return deep_merge(base_dict, contents)
Empty file.
Empty file.
6 changes: 6 additions & 0 deletions tests/utils_modules/fixtures/empty_extends.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
extends:

a: aa
b:
d: dd
e: ee
6 changes: 6 additions & 0 deletions tests/utils_modules/fixtures/invalid_extends.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
extends: ./unknown_file.yml

a: aa
b:
d: dd
e: ee
6 changes: 6 additions & 0 deletions tests/utils_modules/fixtures/mainnet_extends.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
extends: mainnet.yml

a: aa
b:
d: dd
e: ee
1 change: 1 addition & 0 deletions tests/utils_modules/fixtures/number.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
123
6 changes: 6 additions & 0 deletions tests/utils_modules/fixtures/self_extends.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
extends: self_extends.yml

a: aa
b:
d: dd
e: ee
4 changes: 4 additions & 0 deletions tests/utils_modules/fixtures/valid.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
a: 1
b:
c: 2
d: 3
6 changes: 6 additions & 0 deletions tests/utils_modules/fixtures/valid_extends.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
extends: valid.yml

a: aa
b:
d: dd
e: ee
Loading