diff --git a/.github/workflows/TESTING.md b/.github/workflows/TESTING.md new file mode 100644 index 000000000..163fe21ba --- /dev/null +++ b/.github/workflows/TESTING.md @@ -0,0 +1,39 @@ +# Testing Github Action workflows + +It's possible to test the Github Action workflows locally by using https://github.com/nektos/act + +You can start by listing all available jobs in our workflows: + +```bash +act -l +``` + +We have prepared examples on how to test some types of triggers in the `docker.yml` workflow, but it shouldn't be hard to adapt these examples to test other combinations of jobs and triggers. + +## Testing a Tag Push + +To simulate the workflow being trigger by the push of a tag, first generate an event file like this: + +```bash +cat < event.json +{ + "ref": "refs/tags/v0.53.0-rc.1" +} +EOF +``` + +You can change the tag in this event to simulate different types of tags. + +Then, run the `buildx` job with a `push` event providing the event context and a secret called `DOCKERHUB_IMAGE`: + +```bash +act push -e event.json -j buildx -s DOCKERHUB_IMAGE=testing_locally +``` + +## Testing a Scheduled run + +Simulating a scheduled run is similar, just change the type of event: + +```bash +act schedule -j buildx -s DOCKERHUB_IMAGE=testing_locally +``` \ No newline at end of file diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index da37d0c1c..b3a2df32d 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -7,9 +7,6 @@ on: - 'experimental/**' tags: - v* - pull_request: - branches: - - release schedule: - cron: '0 4 * * *' # nightlies at 4am UTC env: @@ -29,81 +26,34 @@ jobs: - '3.8' - '3.9' - '3.10' + - '3.11' exclude: - # XXX: pypy-3.10 does not exist yet + # XXX: neither pypy-3.10 nor pypy-3.11 exist yet, maybe pypy-3.10 will be out on PyPy v7.3.10 - python-impl: pypy python-version: '3.10' + - python-impl: pypy + python-version: '3.11' steps: - name: Checkout uses: actions/checkout@v3 - - name: Prepare tags + - 
name: Prepare base version id: prep - shell: python run: | - import datetime - import re - def extract_pyver(filename): - for line in open(filename).readlines(): - if line.startswith('ARG PYTHON'): - return line.split('=')[1].strip() - ref = '${{ github.ref }}' - dockerfile_cpython = 'Dockerfile' - dockerfile_pypy = 'Dockerfile.pypy' - default_python = 'python' + extract_pyver(dockerfile_cpython) - default_pypy = 'pypy' + extract_pyver(dockerfile_pypy) - if '${{ github.event_name }}' == 'schedule': - base_version = 'nightly' - elif ref.startswith('refs/tags/'): - base_version = ref[10:].split('-', 1)[0] - elif ref.startswith('refs/heads/'): - base_version = ref[11:].replace('/', '-') - if base_version == '${{ github.event.repository.default_branch }}': - base_version = 'stable' - elif ref.startswith('refs/pull/'): - base_version = 'pr-${{ github.event.number }}' - else: - base_version = 'noop' - if '${{ matrix.python-impl }}' == 'pypy': - dockerfile = dockerfile_pypy - suffix = 'pypy${{ matrix.python-version }}' - else: - dockerfile = dockerfile_cpython - suffix = 'python${{ matrix.python-version }}' - version = base_version + '-' + suffix - tags = {version} - if suffix == default_python: - tags.add(base_version) - elif suffix == default_pypy: - tags.add(base_version + '-pypy') - if re.match(r'^v[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$', base_version): - minor = base_version.rpartition('.')[0] - tags.add(minor + '-' + suffix) - if suffix == default_python: - tags.add('latest') - elif '${{ github.event_name }}' == 'push': - tags.add('sha-' + '${{ github.sha }}'[:8]) - print('::set-output name=version::' + version) - images = [] - docker_image = '${{ secrets.DOCKERHUB_IMAGE }}' - if docker_image: - images.append(docker_image) - print('::set-output name=login-dockerhub::true') - else: - print('::set-output name=login-dockerhub::false') - ghcr_image = '${{ secrets.GHCR_IMAGE }}' - if ghcr_image: - images.append(ghcr_image) - print('::set-output name=login-ghcr::true') - 
else: - print('::set-output name=login-ghcr::false') - if images: - print('::set-output name=tags::' + ','.join(f'{i}:{t}' for i in images for t in tags)) - print('::set-output name=push::true') - else: - print('::set-output name=tags::dont-push--local-only') - print('::set-output name=push::false') - print('::set-output name=created::' + datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')) - print('::set-output name=dockerfile::' + dockerfile) + export GITHUB_REF='${{ github.ref }}' + export GITHUB_EVENT_NAME='${{ github.event_name }}' + export GITHUB_SHA='${{ github.sha }}' + export GITHUB_EVENT_DEFAULT_BRANCH='${{ github.event.repository.default_branch }}' + export GITHUB_EVENT_NUMBER='${{ github.event.number }}' + export MATRIX_PYTHON_IMPL='${{ matrix.python-impl }}' + export MATRIX_PYTHON_VERSION='${{ matrix.python-version }}' + export SECRETS_DOCKERHUB_IMAGE='${{ secrets.DOCKERHUB_IMAGE }}' + export SECRETS_GHCR_IMAGE='${{ secrets.GHCR_IMAGE }}' + + python extras/github/docker.py + - name: Check version + if: steps.prep.outputs.check-version + run: | + make check-version VERSION='${{ steps.prep.outputs.check-version }}' - name: Set up QEMU # arm64 is not available natively uses: docker/setup-qemu-action@v2 - name: Set up Docker Buildx @@ -114,22 +64,23 @@ jobs: driver-opts: network=host - name: Login to DockerHub uses: docker/login-action@v2 - if: steps.prep.outputs.login-dockerhub + if: steps.prep.outputs.login-dockerhub == 'true' with: username: ${{ secrets.DOCKERHUB_USERNAME }} password: ${{ secrets.DOCKERHUB_TOKEN }} - name: Login to GitHub Container Registry uses: docker/login-action@v2 - if: steps.prep.outputs.login-ghcr + if: steps.prep.outputs.login-ghcr == 'true' with: registry: ghcr.io username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} - name: Cache Docker layers uses: actions/cache@v3 + if: steps.prep_base_version.outputs.is-nightly == 'false' with: path: /tmp/.buildx-cache - # this key is setup such that every branch has its 
cache and new branches can reuse dev's cache, but not the other way around + # this key is setup such that every branch has its cache and new branches can reuse master's cache, but not the other way around key: ${{ runner.os }}-buildx-${{ matrix.python-impl }}${{ matrix.python-version }}-${{ github.head_ref || github.ref }}-${{ github.sha }} restore-keys: | ${{ runner.os }}-buildx-${{ matrix.python-impl }}${{ matrix.python-version }}-refs/heads/master- @@ -149,6 +100,7 @@ jobs: - name: Build and push uses: docker/build-push-action@v3 continue-on-error: ${{ matrix.python-impl == 'pypy' }} # PyPy is not first-class and has been causing some build failures + if: ${{ !env.ACT }} # Skip this step when testing locally with https://github.com/nektos/act with: context: . file: ${{ steps.prep.outputs.dockerfile }} @@ -169,3 +121,13 @@ jobs: org.opencontainers.image.licenses=${{ github.event.repository.license.spdx_id }} cache-from: type=local,src=/tmp/.buildx-cache cache-to: type=local,dest=/tmp/.buildx-cache + - name: Slack Notification + if: ${{ steps.prep.outputs.slack-notification-version && steps.prep_base_version.outputs.disable-slack-notification == 'false' && job.status == 'success' }} + uses: rtCamp/action-slack-notify@28e8b353eabda5998a2e1203aed33c5999944779 + env: + SLACK_COLOR: ${{ job.status }} # It can turn the job status into a color. Success will be green. + SLACK_MESSAGE: 'We will be deploying this new image soon. Get in touch with the hathor-core team if you want to talk about this deployment.' 
+ SLACK_TITLE: 'Hathor Core - new ${{ steps.prep.outputs.slack-notification-version }} Docker image pushed :rocket:' + SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }} + SLACK_FOOTER: '' + MSG_MINIMAL: actions url diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 8b24838a2..99c9b2bb6 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -7,9 +7,6 @@ on: tags: - v* pull_request: - branches: - - dev - - release jobs: matrix: runs-on: ubuntu-latest @@ -22,7 +19,7 @@ jobs: import os import json full_matrix = { - 'python': ['3.8', '3.9', '3.10'], + 'python': ['3.8', '3.9', '3.10', '3.11'], # available OS's: https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idruns-on 'os': ['ubuntu-22.04', 'macos-12', 'windows-2022'], 'include': [ @@ -82,8 +79,8 @@ jobs: if: startsWith(matrix.os, 'macos') run: | brew cleanup -q - brew update -q - brew install -q graphviz rocksdb + # brew update -q + brew install -q graphviz rocksdb pkg-config - name: Install Poetry dependencies run: poetry install -n --no-root - name: Cache mypy diff --git a/.gitignore b/.gitignore index cfb02a5a1..002fa8b74 100644 --- a/.gitignore +++ b/.gitignore @@ -12,6 +12,7 @@ cover/ /coverage* .mypy_cache +.dmypy.json .pytest_cache extras/docker/_build/ diff --git a/Dockerfile b/Dockerfile index 009b507de..bb0d7bd4a 100644 --- a/Dockerfile +++ b/Dockerfile @@ -29,5 +29,8 @@ ARG PYTHON RUN apt-get -qy update RUN apt-get -qy install libssl1.1 graphviz librocksdb6.11 COPY --from=stage-0 /app/.venv/lib/python${PYTHON}/site-packages/ /usr/local/lib/python${PYTHON}/site-packages/ +# XXX: copy optional BUILD_VERSION file using ...VERSIO[N] instead of ...VERSION* to ensure only one file will be copied +# XXX: also copying the README.md because we need at least one existing file +COPY README.md BUILD_VERSIO[N] / EXPOSE 40403 8080 ENTRYPOINT ["python", "-m", "hathor"] diff --git a/Dockerfile.pypy b/Dockerfile.pypy index 
58dd6881e..fe4799ed8 100644 --- a/Dockerfile.pypy +++ b/Dockerfile.pypy @@ -29,5 +29,8 @@ ARG PYTHON RUN apt-get -qy update && apt-get -qy upgrade RUN apt-get -qy install libssl1.1 graphviz librocksdb6.11 COPY --from=stage-0 /app/.venv/lib/pypy${PYTHON}/site-packages/ /opt/pypy/lib/pypy${PYTHON}/site-packages/ +# XXX: copy optional BUILD_VERSION file using ...VERSIO[N] instead of ...VERSION* to ensure only one file will be copied +# XXX: also copying the README.md because we need at least one existing file +COPY README.md BUILD_VERSIO[N] / EXPOSE 40403 8080 ENTRYPOINT ["pypy", "-m", "hathor"] diff --git a/Makefile b/Makefile index 004ecf7ca..d22dbcccc 100644 --- a/Makefile +++ b/Makefile @@ -7,6 +7,7 @@ all: check tests tests_cli = tests/cli/ tests_lib = $(filter-out ${tests_cli} tests/__pycache__/, $(dir $(wildcard tests/*/.))) +tests_ci = extras/github/ pytest_flags = -p no:warnings --cov-report=term --cov-report=html --cov-report=xml --cov=hathor @@ -44,8 +45,12 @@ tests-genesis: HATHOR_TEST_CONFIG_FILE=hathor.conf.mainnet pytest tests/tx/test_genesis.py HATHOR_TEST_CONFIG_FILE=hathor.conf.testnet pytest tests/tx/test_genesis.py +.PHONY: tests-ci +tests-ci: + pytest $(tests_ci) + .PHONY: tests -tests: tests-cli tests-lib tests-genesis +tests: tests-cli tests-lib tests-genesis tests-ci .PHONY: tests-full tests-full: @@ -57,6 +62,10 @@ tests-full: mypy: mypy -p hathor -p tests +.PHONY: dmypy +dmypy: + dmypy run --timeout 86400 -- -p hathor -p tests + .PHONY: flake8 flake8: flake8 $(py_sources) @@ -67,11 +76,14 @@ isort-check: .PHONY: check-version check-version: - bash ./extras/check_version.sh + bash ./extras/check_version.sh $(VERSION) .PHONY: check check: check-version flake8 isort-check mypy +.PHONY: dcheck +dcheck: check-version flake8 isort-check dmypy + # formatting: .PHONY: fmt diff --git a/README.md b/README.md index 3e87ecb54..6ef4ad85f 100644 --- a/README.md +++ b/README.md @@ -2,8 +2,8 @@ 
[![Mainnet](https://img.shields.io/badge/mainnet-live-success)](https://explorer.hathor.network/) [![Version](https://img.shields.io/github/v/release/HathorNetwork/hathor-core)](https://github.com/HathorNetwork/hathor-core/releases/latest) -[![Testing](https://img.shields.io/github/workflow/status/HathorNetwork/hathor-core/tests?label=tests&logo=github)](https://github.com/HathorNetwork/hathor-core/actions?query=workflow%3Atests+branch%3Amaster) -[![Docker](https://img.shields.io/github/workflow/status/HathorNetwork/hathor-core/docker?label=build&logo=docker)](https://hub.docker.com/repository/docker/hathornetwork/hathor-core) +[![Testing](https://img.shields.io/github/actions/workflow/status/HathorNetwork/hathor-core/main.yml?branch=master&label=tests&logo=github)](https://github.com/HathorNetwork/hathor-core/actions?query=workflow%3Atests+branch%3Amaster) +[![Docker](https://img.shields.io/github/actions/workflow/status/HathorNetwork/hathor-core/docker.yml?branch=master&label=build&logo=docker)](https://hub.docker.com/repository/docker/hathornetwork/hathor-core) [![Codecov](https://img.shields.io/codecov/c/github/HathorNetwork/hathor-core?logo=codecov)](https://codecov.io/gh/hathornetwork/hathor-core) [![Discord](https://img.shields.io/discord/566500848570466316?logo=discord)](https://discord.com/invite/35mFEhk) [![License](https://img.shields.io/github/license/HathorNetwork/hathor-core)](./LICENSE.txt) @@ -59,11 +59,11 @@ First, you need to have Python >=3.8 installed. If you don't, we recommend you t brew install pyenv ``` - then Python 3.10 (you could check the latest 3.10.x version with `pyenv install --list`): + then Python 3.11 (you could check the latest 3.11.x version with `pyenv install --list`): ``` - pyenv install 3.10.6 - pyenv local 3.10.6 + pyenv install 3.11.0 + pyenv local 3.11.0 pip install -U poetry ``` @@ -75,7 +75,7 @@ First, you need to have Python >=3.8 installed. 
If you don't, we recommend you t - on Windows 10 (using [winget](https://github.com/microsoft/winget-cli)): ``` - winget install python-3.10 + winget install python-3.11 pip install -U poetry ``` diff --git a/extras/__init__.py b/extras/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/extras/check_version.sh b/extras/check_version.sh index 805d2eafb..e62cfbe01 100755 --- a/extras/check_version.sh +++ b/extras/check_version.sh @@ -1,11 +1,22 @@ #!/bin/bash +### +# This script will check all source files containing the project version and exit with an error code -1 in case +# they don't match. +# +# usage: ./extras/check_version.sh [version] +# +# example: ./extras/check_version.sh 0.52.1 +# +# When a version is provided, it is checked against the package version. +### + OPENAPI_FILE="hathor/cli/openapi_files/openapi_base.json" SRC_FILE="hathor/version.py" PACKAGE_FILE="pyproject.toml" OPENAPI_VERSION=`grep "version\":" ${OPENAPI_FILE} | cut -d'"' -f4` -SRC_VERSION=`grep "__version__" ${SRC_FILE} | cut -d "'" -f2` +SRC_VERSION=`grep "BASE_VERSION =" ${SRC_FILE} | cut -d "'" -f2` PACKAGE_VERSION=`grep '^version' ${PACKAGE_FILE} | cut -d '"' -f2` # For debugging: @@ -25,4 +36,12 @@ if [[ x${PACKAGE_VERSION}x != x${OPENAPI_VERSION}x ]]; then EXITCODE=-1 fi +# We expect an optional argument containing a version string to be checked against the others +if [[ $# -eq 1 ]]; then + if [[ x${PACKAGE_VERSION}x != x$1x ]]; then + echo "Version different in ${PACKAGE_FILE} and passed argument" + EXITCODE=-1 + fi +fi + exit $EXITCODE diff --git a/extras/github/__init__.py b/extras/github/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/extras/github/docker.py b/extras/github/docker.py new file mode 100644 index 000000000..617c9faf1 --- /dev/null +++ b/extras/github/docker.py @@ -0,0 +1,174 @@ +import re +import os +from typing import Dict + +def print_output(output: Dict): + for k, v in output.items(): + print(f'::set-output 
name={k}::{v}') + + +def prep_base_version(environ: Dict): + GITHUB_REF = environ.get('GITHUB_REF') + GITHUB_EVENT_NAME = environ.get('GITHUB_EVENT_NAME') + GITHUB_SHA = environ.get('GITHUB_SHA') + GITHUB_EVENT_DEFAULT_BRANCH = environ.get('GITHUB_EVENT_DEFAULT_BRANCH') + GITHUB_EVENT_NUMBER = environ.get('GITHUB_EVENT_NUMBER') + GITHUB_REPOSITORY = environ.get('GITHUB_REPOSITORY') + + ref = GITHUB_REF + + # Set base_version according to the github ref type + is_release_candidate = False + is_release = False + is_nightly = False + + overwrite_hathor_core_version = False + + output = {} + + if GITHUB_EVENT_NAME == 'schedule': + commit_short_sha = GITHUB_SHA[:8] + base_version = 'nightly-' + commit_short_sha + is_nightly = True + elif ref.startswith('refs/tags/'): + git_tag = ref[10:] + base_version = git_tag.split('-', 1)[0] + + pre_release = (git_tag.split('-', 1)[1:] or [None])[0] + overwrite_hathor_core_version = True + # This will be used to check against the versions in our source files + check_version = base_version[1:] + output['check-version'] = check_version + + # Check if this is a release-candidate + if pre_release: + if re.match(r'^rc\.[0-9]{1,3}$', pre_release): + base_version = base_version + '-' + pre_release + is_release_candidate = True + else: + raise ValueError(f'Invalid Tag Value: {git_tag}') + else: + is_release = True + elif ref.startswith('refs/heads/'): + base_version = ref[11:].replace('/', '-') + if base_version == GITHUB_EVENT_DEFAULT_BRANCH: + base_version = 'stable' + elif ref.startswith('refs/pull/'): + base_version = 'pr-' + GITHUB_EVENT_NUMBER + else: + base_version = 'noop' + + overwrite_hathor_core_version = is_release or is_release_candidate or is_nightly + # We don't know for sure at this point in which cases we should enable Slack notification, + # but we know when we should disable it for sure + output['disable-slack-notification'] = not (is_release or is_release_candidate) + + if GITHUB_REPOSITORY.lower() != 
'hathornetwork/hathor-core': + output['disable-slack-notification'] = True + + return output, base_version, is_release_candidate, overwrite_hathor_core_version + + +def prep_tags(environ: Dict, base_version: str, is_release_candidate: bool): + MATRIX_PYTHON_IMPL = environ.get('MATRIX_PYTHON_IMPL') + MATRIX_PYTHON_VERSION = environ.get('MATRIX_PYTHON_VERSION') + + SECRETS_DOCKERHUB_IMAGE = environ.get('SECRETS_DOCKERHUB_IMAGE') + SECRETS_GHCR_IMAGE = environ.get('SECRETS_GHCR_IMAGE') + + GITHUB_EVENT_NAME = environ.get('GITHUB_EVENT_NAME') + GITHUB_SHA = environ.get('GITHUB_SHA') + + import datetime + import re + + output = {} + + # Extract default python versions from the Dockerfiles + def extract_pyver(filename): + for line in open(filename).readlines(): + if line.startswith('ARG PYTHON'): + return line.split('=')[1].strip() + dockerfile_cpython = 'Dockerfile' + dockerfile_pypy = 'Dockerfile.pypy' + default_python = 'python' + extract_pyver(dockerfile_cpython) + default_pypy = 'pypy' + extract_pyver(dockerfile_pypy) + + # Set which Dockerfile to use based on the versions matrix + if MATRIX_PYTHON_IMPL == 'pypy': + dockerfile = dockerfile_pypy + suffix = 'pypy' + MATRIX_PYTHON_VERSION + else: + dockerfile = dockerfile_cpython + suffix = 'python' + MATRIX_PYTHON_VERSION + + # Build the tag list + + tags = set() + + # We don't want a tag with a python suffix for release-candidates + if is_release_candidate: + version = base_version + else: + version = base_version + '-' + suffix + tags.add(version) + + if suffix == default_python: + tags.add(base_version) + output['slack-notification-version'] = base_version + elif suffix == default_pypy: + tags.add(base_version + '-pypy') + + # Check if this is a stable release + if re.match(r'^v[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$', base_version): + minor = base_version.rpartition('.')[0] + tags.add(minor + '-' + suffix) + if suffix == default_python: + tags.add('latest') + elif GITHUB_EVENT_NAME == 'push' and not 
is_release_candidate: + tags.add('sha-' + GITHUB_SHA[:8]) + + # Build the image list and set outputs + output['version'] = version + images = [] + docker_image = SECRETS_DOCKERHUB_IMAGE + if docker_image: + images.append(docker_image) + output['login-dockerhub'] = 'true' + else: + output['login-dockerhub'] = 'false' + ghcr_image = SECRETS_GHCR_IMAGE + if ghcr_image: + images.append(ghcr_image) + output['login-ghcr'] = 'true' + else: + output['login-ghcr'] = 'false' + if images and tags: + output['tags'] = ','.join(f'{i}:{t}' for i in images for t in tags) + output['push'] = 'true' + else: + output['tags'] = 'dont-push--local-only' + output['push'] = 'false' + + output['created'] = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ') + output['dockerfile'] = dockerfile + + return output + + +def overwrite_version(base_version: str): + with open('BUILD_VERSION', 'w') as file: + if base_version.startswith('v'): + base_version = base_version[1:] + file.write(base_version) + + +if __name__ == '__main__': + output, base_version, is_release_candidate, overwrite_hathor_core_version = prep_base_version(os.environ) + print_output(output) + + output = prep_tags(os.environ, base_version, is_release_candidate) + print_output(output) + + if overwrite_hathor_core_version: + overwrite_version(base_version) diff --git a/extras/github/test_docker.py b/extras/github/test_docker.py new file mode 100644 index 000000000..d52f0b8dc --- /dev/null +++ b/extras/github/test_docker.py @@ -0,0 +1,199 @@ +import os +import unittest + +from extras.github.docker import prep_base_version, prep_tags + +class DockerWorkflowTest(unittest.TestCase): + def setUp(self): + os.environ.update({ + 'GITHUB_REPOSITORY': 'hathornetwork/hathor-core', + }) + + def test_nightly_build_no_github_secret(self): + os.environ.update({ + 'GITHUB_REF': 'refs/heads/ci/extract-python-scripts', + 'GITHUB_EVENT_NAME': 'schedule', + 'GITHUB_SHA': '55629a7d0ae267cdd27618f452e9f1ad6764fd43', + 
'GITHUB_EVENT_DEFAULT_BRANCH': 'master', + 'GITHUB_EVENT_NUMBER': '', + 'MATRIX_PYTHON_IMPL': 'python', + 'MATRIX_PYTHON_VERSION': '3.9', + 'SECRETS_DOCKERHUB_IMAGE': '', + 'SECRETS_GHCR_IMAGE': '', + }) + + output, base_version, is_release_candidate, overwrite_hathor_core_version = prep_base_version(os.environ) + + self.assertTrue(overwrite_hathor_core_version) + self.assertFalse(is_release_candidate) + self.assertTrue(output['disable-slack-notification']) + self.assertEqual(base_version, 'nightly-55629a7d') + + output = prep_tags(os.environ, base_version, is_release_candidate) + + self.assertEqual(output['slack-notification-version'], base_version) + self.assertEqual(output['version'], base_version + '-python3.9') + self.assertEqual(output['login-dockerhub'], 'false') + self.assertEqual(output['login-ghcr'], 'false') + self.assertEqual(output['tags'], 'dont-push--local-only') + self.assertEqual(output['push'], 'false') + self.assertEqual(output['dockerfile'], 'Dockerfile') + + def test_nightly_build(self): + os.environ.update({ + 'GITHUB_REF': 'refs/heads/ci/extract-python-scripts', + 'GITHUB_EVENT_NAME': 'schedule', + 'GITHUB_SHA': '55629a7d0ae267cdd27618f452e9f1ad6764fd43', + 'GITHUB_EVENT_DEFAULT_BRANCH': 'master', + 'GITHUB_EVENT_NUMBER': '', + 'MATRIX_PYTHON_IMPL': 'python', + 'MATRIX_PYTHON_VERSION': '3.9', + 'SECRETS_DOCKERHUB_IMAGE': 'mock_image', + 'SECRETS_GHCR_IMAGE': '', + }) + + output, base_version, is_release_candidate, overwrite_hathor_core_version = prep_base_version(os.environ) + + self.assertTrue(overwrite_hathor_core_version) + self.assertFalse(is_release_candidate) + self.assertTrue(output['disable-slack-notification']) + self.assertEqual(base_version, 'nightly-55629a7d') + + output = prep_tags(os.environ, base_version, is_release_candidate) + + self.assertEqual(output['slack-notification-version'], base_version) + self.assertEqual(output['version'], base_version + '-python3.9') + self.assertEqual(output['login-dockerhub'], 'true') + 
self.assertEqual(output['login-ghcr'], 'false') + self.assertEqual(len(output['tags'].split(',')), 2) + self.assertIn('mock_image:nightly-55629a7d', output['tags'].split(',')) + self.assertIn('mock_image:nightly-55629a7d-python3.9', output['tags'].split(',')) + self.assertEqual(output['push'], 'true') + self.assertEqual(output['dockerfile'], 'Dockerfile') + + + def test_release_candidate_non_default_python(self): + os.environ.update({ + 'GITHUB_REF': 'refs/tags/v0.53.0-rc.1', + 'GITHUB_EVENT_NAME': 'push', + 'GITHUB_SHA': '55629a7d0ae267cdd27618f452e9f1ad6764fd43', + 'GITHUB_EVENT_DEFAULT_BRANCH': 'master', + 'GITHUB_EVENT_NUMBER': '', + 'MATRIX_PYTHON_IMPL': 'python', + 'MATRIX_PYTHON_VERSION': '3.11', + 'SECRETS_DOCKERHUB_IMAGE': 'mock_image', + 'SECRETS_GHCR_IMAGE': '', + }) + + output, base_version, is_release_candidate, overwrite_hathor_core_version = prep_base_version(os.environ) + + self.assertTrue(overwrite_hathor_core_version) + self.assertTrue(is_release_candidate) + self.assertFalse(output['disable-slack-notification']) + self.assertEqual(base_version, 'v0.53.0-rc.1') + + output = prep_tags(os.environ, base_version, is_release_candidate) + + self.assertNotIn('slack-notification-version', output) + self.assertEqual(output['version'], base_version) + self.assertEqual(output['login-dockerhub'], 'true') + self.assertEqual(output['login-ghcr'], 'false') + self.assertEqual(output['tags'], 'dont-push--local-only') + self.assertEqual(output['push'], 'false') + self.assertEqual(output['dockerfile'], 'Dockerfile') + + def test_release_candidate_default_python(self): + os.environ.update({ + 'GITHUB_REF': 'refs/tags/v0.53.0-rc.1', + 'GITHUB_EVENT_NAME': 'push', + 'GITHUB_SHA': '55629a7d0ae267cdd27618f452e9f1ad6764fd43', + 'GITHUB_EVENT_DEFAULT_BRANCH': 'master', + 'GITHUB_EVENT_NUMBER': '', + 'MATRIX_PYTHON_IMPL': 'python', + 'MATRIX_PYTHON_VERSION': '3.9', + 'SECRETS_DOCKERHUB_IMAGE': 'mock_image', + 'SECRETS_GHCR_IMAGE': '', + }) + + output, base_version, 
is_release_candidate, overwrite_hathor_core_version = prep_base_version(os.environ) + + self.assertTrue(overwrite_hathor_core_version) + self.assertTrue(is_release_candidate) + self.assertFalse(output['disable-slack-notification']) + self.assertEqual(base_version, 'v0.53.0-rc.1') + + output = prep_tags(os.environ, base_version, is_release_candidate) + + self.assertEqual(output['slack-notification-version'], base_version) + self.assertEqual(output['version'], base_version) + self.assertEqual(output['login-dockerhub'], 'true') + self.assertEqual(output['login-ghcr'], 'false') + self.assertEqual(output['tags'], 'mock_image:v0.53.0-rc.1') + self.assertEqual(output['push'], 'true') + self.assertEqual(output['dockerfile'], 'Dockerfile') + + def test_release_default_python(self): + os.environ.update({ + 'GITHUB_REF': 'refs/tags/v0.53.0', + 'GITHUB_EVENT_NAME': 'push', + 'GITHUB_SHA': '55629a7d0ae267cdd27618f452e9f1ad6764fd43', + 'GITHUB_EVENT_DEFAULT_BRANCH': 'master', + 'GITHUB_EVENT_NUMBER': '', + 'MATRIX_PYTHON_IMPL': 'python', + 'MATRIX_PYTHON_VERSION': '3.9', + 'SECRETS_DOCKERHUB_IMAGE': 'mock_image', + 'SECRETS_GHCR_IMAGE': '', + }) + + output, base_version, is_release_candidate, overwrite_hathor_core_version = prep_base_version(os.environ) + + self.assertTrue(overwrite_hathor_core_version) + self.assertFalse(is_release_candidate) + self.assertFalse(output['disable-slack-notification']) + self.assertEqual(base_version, 'v0.53.0') + + output = prep_tags(os.environ, base_version, is_release_candidate) + + self.assertEqual(output['slack-notification-version'], base_version) + self.assertEqual(output['version'], base_version + '-python3.9') + self.assertEqual(output['login-dockerhub'], 'true') + self.assertEqual(output['login-ghcr'], 'false') + self.assertEqual(len(output['tags'].split(',')), 4) + self.assertIn('mock_image:v0.53-python3.9', output['tags'].split(',')) + self.assertIn('mock_image:v0.53.0-python3.9', output['tags'].split(',')) + 
self.assertIn('mock_image:v0.53.0', output['tags'].split(',')) + self.assertIn('mock_image:latest', output['tags'].split(',')) + self.assertEqual(output['push'], 'true') + self.assertEqual(output['dockerfile'], 'Dockerfile') + + def test_release_non_default_python(self): + os.environ.update({ + 'GITHUB_REF': 'refs/tags/v0.53.0', + 'GITHUB_EVENT_NAME': 'push', + 'GITHUB_SHA': '55629a7d0ae267cdd27618f452e9f1ad6764fd43', + 'GITHUB_EVENT_DEFAULT_BRANCH': 'master', + 'GITHUB_EVENT_NUMBER': '', + 'MATRIX_PYTHON_IMPL': 'pypy', + 'MATRIX_PYTHON_VERSION': '3.8', + 'SECRETS_DOCKERHUB_IMAGE': 'mock_image', + 'SECRETS_GHCR_IMAGE': '', + }) + + output, base_version, is_release_candidate, overwrite_hathor_core_version = prep_base_version(os.environ) + + self.assertTrue(overwrite_hathor_core_version) + self.assertFalse(is_release_candidate) + self.assertFalse(output['disable-slack-notification']) + self.assertEqual(base_version, 'v0.53.0') + + output = prep_tags(os.environ, base_version, is_release_candidate) + + self.assertNotIn('slack-notification-version', output) + self.assertEqual(output['version'], 'v0.53.0-pypy3.8') + self.assertEqual(output['login-dockerhub'], 'true') + self.assertEqual(output['login-ghcr'], 'false') + self.assertEqual(len(output['tags'].split(',')), 2) + self.assertIn('mock_image:v0.53-pypy3.8', output['tags'].split(',')) + self.assertIn('mock_image:v0.53.0-pypy3.8', output['tags'].split(',')) + self.assertEqual(output['push'], 'true') + self.assertEqual(output['dockerfile'], 'Dockerfile.pypy') diff --git a/hathor/builder/__init__.py b/hathor/builder/__init__.py new file mode 100644 index 000000000..93271b01a --- /dev/null +++ b/hathor/builder/__init__.py @@ -0,0 +1,22 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from hathor.builder.builder import BuildArtifacts, Builder +from hathor.builder.cli_builder import CliBuilder + +__all__ = [ + 'BuildArtifacts', + 'Builder', + 'CliBuilder', +] diff --git a/hathor/builder/builder.py b/hathor/builder/builder.py new file mode 100644 index 000000000..c3815ed27 --- /dev/null +++ b/hathor/builder/builder.py @@ -0,0 +1,458 @@ +# Copyright 2021 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from enum import Enum +from typing import Any, Dict, List, NamedTuple, Optional, Set + +from structlog import get_logger + +from hathor.checkpoint import Checkpoint +from hathor.conf import HathorSettings +from hathor.conf.settings import HathorSettings as HathorSettingsType +from hathor.consensus import ConsensusAlgorithm +from hathor.event import EventManager +from hathor.event.storage import EventMemoryStorage, EventRocksDBStorage, EventStorage +from hathor.event.websocket import EventWebsocketFactory +from hathor.indexes import IndexesManager +from hathor.manager import HathorManager +from hathor.p2p.manager import ConnectionsManager +from hathor.p2p.peer_id import PeerId +from hathor.pubsub import PubSubManager +from hathor.storage import RocksDBStorage +from hathor.transaction.storage import TransactionMemoryStorage, TransactionRocksDBStorage, TransactionStorage +from hathor.util import Random, Reactor, get_environment_info +from hathor.wallet import BaseWallet, Wallet + +logger = get_logger() + + +class StorageType(Enum): + MEMORY = 'memory' + ROCKSDB = 'rocksdb' + + +class BuildArtifacts(NamedTuple): + """Artifacts created by a builder.""" + peer_id: PeerId + settings: HathorSettingsType + rng: Random + reactor: Reactor + manager: HathorManager + p2p_manager: ConnectionsManager + pubsub: PubSubManager + consensus: ConsensusAlgorithm + tx_storage: TransactionStorage + indexes: Optional[IndexesManager] + wallet: Optional[BaseWallet] + rocksdb_storage: Optional[RocksDBStorage] + + +class Builder: + """Builder builds the core objects to run a full node. 
+ + Example: + + builder = Builder() + builder.use_memory() + artifacts = builder.build() + """ + def __init__(self) -> None: + self.log = logger.new() + self.artifacts: Optional[BuildArtifacts] = None + + self._settings: HathorSettingsType = HathorSettings() + self._rng: Random = Random() + self._checkpoints: Optional[List[Checkpoint]] = None + self._capabilities: Optional[List[str]] = None + + self._peer_id: Optional[PeerId] = None + self._network: Optional[str] = None + self._cmdline: str = '' + + self._storage_type: StorageType = StorageType.MEMORY + self._force_memory_index: bool = False + + self._event_manager: Optional[EventManager] = None + self._event_ws_factory: Optional[EventWebsocketFactory] = None + + self._rocksdb_path: Optional[str] = None + self._rocksdb_storage: Optional[RocksDBStorage] = None + self._rocksdb_cache_capacity: Optional[int] = None + self._rocksdb_with_index: Optional[bool] = None + + self._tx_storage: Optional[TransactionStorage] = None + self._event_storage: Optional[EventStorage] = None + + self._reactor: Optional[Reactor] = None + self._pubsub: Optional[PubSubManager] = None + + self._wallet: Optional[BaseWallet] = None + self._wallet_directory: Optional[str] = None + self._wallet_unlock: Optional[bytes] = None + + self._enable_address_index: bool = False + self._enable_tokens_index: bool = False + self._enable_utxo_index: bool = False + + self._enable_sync_v1: Optional[bool] = None + self._enable_sync_v2: Optional[bool] = None + + self._stratum_port: Optional[int] = None + + self._full_verification: Optional[bool] = None + + self._soft_voided_tx_ids: Optional[Set[bytes]] = None + + def build(self) -> BuildArtifacts: + if self.artifacts is not None: + raise ValueError('cannot call build twice') + + settings = self._get_settings() + reactor = self._get_reactor() + pubsub = self._get_or_create_pubsub() + + peer_id = self._get_peer_id() + + soft_voided_tx_ids = self._get_soft_voided_tx_ids() + consensus_algorithm = 
ConsensusAlgorithm(soft_voided_tx_ids, pubsub) + + wallet = self._get_or_create_wallet() + event_storage = self._get_or_create_event_storage() + event_manager = self._get_or_create_event_manager() + tx_storage = self._get_or_create_tx_storage() + indexes = tx_storage.indexes + assert indexes is not None + + if self._enable_address_index: + indexes.enable_address_index(pubsub) + + if self._enable_tokens_index: + indexes.enable_tokens_index() + + if self._enable_utxo_index: + indexes.enable_utxo_index() + + kwargs: Dict[str, Any] = {} + + if self._enable_sync_v1 is not None: + # XXX: the interface of the Builder was kept using v1 instead of v1_1 to minimize the changes needed + kwargs['enable_sync_v1_1'] = self._enable_sync_v1 + + if self._enable_sync_v2 is not None: + kwargs['enable_sync_v2'] = self._enable_sync_v2 + + if self._stratum_port is not None: + kwargs['stratum_port'] = self._stratum_port + + if self._network is None: + raise TypeError('you must set a network') + + if self._full_verification is not None: + kwargs['full_verification'] = self._full_verification + + manager = HathorManager( + reactor, + pubsub=pubsub, + consensus_algorithm=consensus_algorithm, + peer_id=peer_id, + tx_storage=tx_storage, + event_storage=event_storage, + network=self._network, + wallet=wallet, + rng=self._rng, + checkpoints=self._checkpoints, + capabilities=self._capabilities, + environment_info=get_environment_info(self._cmdline, peer_id.id), + event_manager=event_manager, + **kwargs + ) + + self.artifacts = BuildArtifacts( + peer_id=peer_id, + settings=settings, + rng=self._rng, + reactor=reactor, + manager=manager, + p2p_manager=manager.connections, + pubsub=pubsub, + consensus=consensus_algorithm, + tx_storage=tx_storage, + indexes=indexes, + wallet=wallet, + rocksdb_storage=self._rocksdb_storage, + ) + + return self.artifacts + + def check_if_can_modify(self) -> None: + if self.artifacts is not None: + raise ValueError('cannot modify after build() is called') + + def 
set_event_manager(self, event_manager: EventManager) -> 'Builder': + self.check_if_can_modify() + self._event_manager = event_manager + return self + + def set_rng(self, rng: Random) -> 'Builder': + self.check_if_can_modify() + self._rng = rng + return self + + def set_checkpoints(self, checkpoints: List[Checkpoint]) -> 'Builder': + self.check_if_can_modify() + self._checkpoints = checkpoints + return self + + def set_capabilities(self, capabilities: List[str]) -> 'Builder': + self.check_if_can_modify() + self._capabilities = capabilities + return self + + def set_peer_id(self, peer_id: PeerId) -> 'Builder': + self.check_if_can_modify() + self._peer_id = peer_id + return self + + def _get_settings(self) -> HathorSettingsType: + return self._settings + + def _get_reactor(self) -> Reactor: + if self._reactor is not None: + return self._reactor + raise ValueError('reactor not set') + + def _get_soft_voided_tx_ids(self) -> Set[bytes]: + if self._soft_voided_tx_ids is not None: + return self._soft_voided_tx_ids + + settings = self._get_settings() + + return set(settings.SOFT_VOIDED_TX_IDS) + + def _get_peer_id(self) -> PeerId: + if self._peer_id is not None: + return self._peer_id + raise ValueError('peer_id not set') + + def _get_or_create_pubsub(self) -> PubSubManager: + if self._pubsub is None: + self._pubsub = PubSubManager(self._get_reactor()) + return self._pubsub + + def _get_or_create_rocksdb_storage(self) -> RocksDBStorage: + assert self._rocksdb_path is not None + + if self._rocksdb_storage is not None: + return self._rocksdb_storage + + kwargs = {} + if self._rocksdb_cache_capacity is not None: + kwargs = dict(cache_capacity=self._rocksdb_cache_capacity) + + self._rocksdb_storage = RocksDBStorage( + path=self._rocksdb_path, + **kwargs + ) + + return self._rocksdb_storage + + def _get_or_create_tx_storage(self) -> TransactionStorage: + if self._tx_storage is not None: + return self._tx_storage + + if self._storage_type == StorageType.MEMORY: + return 
TransactionMemoryStorage() + + if self._storage_type == StorageType.ROCKSDB: + rocksdb_storage = self._get_or_create_rocksdb_storage() + use_memory_index = self._force_memory_index + + kwargs = {} + if self._rocksdb_with_index is not None: + kwargs = dict(with_index=self._rocksdb_with_index) + + return TransactionRocksDBStorage( + rocksdb_storage, + use_memory_indexes=use_memory_index, + **kwargs + ) + + raise NotImplementedError + + def _get_or_create_event_storage(self) -> EventStorage: + if self._event_storage is not None: + pass + elif self._storage_type == StorageType.MEMORY: + self._event_storage = EventMemoryStorage() + elif self._storage_type == StorageType.ROCKSDB: + rocksdb_storage = self._get_or_create_rocksdb_storage() + self._event_storage = EventRocksDBStorage(rocksdb_storage) + else: + raise NotImplementedError + + return self._event_storage + + def _get_or_create_event_manager(self) -> Optional[EventManager]: + if self._event_manager is None and self._event_ws_factory is not None: + self._event_manager = EventManager( + reactor=self._get_reactor(), + pubsub=self._get_or_create_pubsub(), + event_storage=self._get_or_create_event_storage(), + event_ws_factory=self._event_ws_factory + ) + + return self._event_manager + + def use_memory(self) -> 'Builder': + self.check_if_can_modify() + self._storage_type = StorageType.MEMORY + return self + + def use_rocksdb( + self, + path: str, + with_index: Optional[bool] = None, + cache_capacity: Optional[int] = None + ) -> 'Builder': + self.check_if_can_modify() + self._storage_type = StorageType.ROCKSDB + self._rocksdb_path = path + self._rocksdb_with_index = with_index + self._rocksdb_cache_capacity = cache_capacity + return self + + def force_memory_index(self) -> 'Builder': + self.check_if_can_modify() + self._force_memory_index = True + return self + + def _get_or_create_wallet(self) -> Optional[BaseWallet]: + if self._wallet is not None: + assert self._wallet_directory is None + assert self._wallet_unlock is 
None + return self._wallet + + if self._wallet_directory is None: + return None + wallet = Wallet(directory=self._wallet_directory) + if self._wallet_unlock is not None: + wallet.unlock(self._wallet_unlock) + return wallet + + def set_wallet(self, wallet: BaseWallet) -> 'Builder': + self.check_if_can_modify() + self._wallet = wallet + return self + + def enable_keypair_wallet(self, directory: str, *, unlock: Optional[bytes] = None) -> 'Builder': + self.check_if_can_modify() + self._wallet_directory = directory + self._wallet_unlock = unlock + return self + + def enable_stratum_server(self, port: int) -> 'Builder': + self.check_if_can_modify() + self._stratum_port = port + return self + + def enable_address_index(self) -> 'Builder': + self.check_if_can_modify() + self._enable_address_index = True + return self + + def enable_tokens_index(self) -> 'Builder': + self.check_if_can_modify() + self._enable_tokens_index = True + return self + + def enable_utxo_index(self) -> 'Builder': + self.check_if_can_modify() + self._enable_utxo_index = True + return self + + def enable_wallet_index(self) -> 'Builder': + self.check_if_can_modify() + self.enable_address_index() + self.enable_tokens_index() + return self + + def enable_event_manager(self, *, event_ws_factory: EventWebsocketFactory) -> 'Builder': + self.check_if_can_modify() + self._event_ws_factory = event_ws_factory + return self + + def set_tx_storage(self, tx_storage: TransactionStorage) -> 'Builder': + self.check_if_can_modify() + self._tx_storage = tx_storage + return self + + def set_event_storage(self, event_storage: EventStorage) -> 'Builder': + self.check_if_can_modify() + self._event_storage = event_storage + return self + + def set_reactor(self, reactor: Reactor) -> 'Builder': + self.check_if_can_modify() + self._reactor = reactor + return self + + def set_pubsub(self, pubsub: PubSubManager) -> 'Builder': + self.check_if_can_modify() + self._pubsub = pubsub + return self + + def set_network(self, network: 
str) -> 'Builder': + self.check_if_can_modify() + self._network = network + return self + + def set_enable_sync_v1(self, enable_sync_v1: bool) -> 'Builder': + self.check_if_can_modify() + self._enable_sync_v1 = enable_sync_v1 + return self + + def set_enable_sync_v2(self, enable_sync_v2: bool) -> 'Builder': + self.check_if_can_modify() + self._enable_sync_v2 = enable_sync_v2 + return self + + def enable_sync_v1(self) -> 'Builder': + self.check_if_can_modify() + self._enable_sync_v1 = True + return self + + def disable_sync_v1(self) -> 'Builder': + self.check_if_can_modify() + self._enable_sync_v1 = False + return self + + def enable_sync_v2(self) -> 'Builder': + self.check_if_can_modify() + self._enable_sync_v2 = True + return self + + def disable_sync_v2(self) -> 'Builder': + self.check_if_can_modify() + self._enable_sync_v2 = False + return self + + def set_full_verification(self, full_verification: bool) -> 'Builder': + self.check_if_can_modify() + self._full_verification = full_verification + return self + + def set_soft_voided_tx_ids(self, soft_voided_tx_ids: Set[bytes]) -> 'Builder': + self.check_if_can_modify() + self._soft_voided_tx_ids = soft_voided_tx_ids + return self diff --git a/hathor/builder.py b/hathor/builder/cli_builder.py similarity index 85% rename from hathor/builder.py rename to hathor/builder/cli_builder.py index 6251d1eb7..740079311 100644 --- a/hathor/builder.py +++ b/hathor/builder/cli_builder.py @@ -26,18 +26,29 @@ from twisted.web import server from twisted.web.resource import Resource +from hathor.consensus import ConsensusAlgorithm +from hathor.event import EventManager +from hathor.event.resources.event import EventResource from hathor.exception import BuilderError +from hathor.indexes import IndexesManager from hathor.manager import HathorManager from hathor.p2p.peer_id import PeerId from hathor.p2p.utils import discover_hostname from hathor.prometheus import PrometheusMetricsExporter +from hathor.pubsub import PubSubManager from 
hathor.wallet import BaseWallet, HDWallet, Wallet logger = get_logger() class CliBuilder: + """CliBuilder builds the core objects from args. + + TODO Refactor to use Builder. It could even be ported to a Builder.from_args classmethod. + """ def __init__(self) -> None: + self.log = logger.new() + self._build_prometheus = False self._build_status = False @@ -47,13 +58,12 @@ def check_or_raise(self, condition: bool, message: str) -> None: raise BuilderError(message) def create_manager(self, reactor: PosixReactorBase, args: Namespace) -> HathorManager: - self.log = logger.new() - import hathor from hathor.conf import HathorSettings from hathor.conf.get_settings import get_settings_module from hathor.daa import TestMode, _set_test_mode from hathor.event.storage import EventMemoryStorage, EventRocksDBStorage, EventStorage + from hathor.event.websocket.factory import EventWebsocketFactory from hathor.p2p.netfilter.utils import add_peer_id_blacklist from hathor.p2p.peer_discovery import BootstrapPeerDiscovery, DNSPeerDiscovery from hathor.storage import RocksDBStorage @@ -87,14 +97,15 @@ def create_manager(self, reactor: PosixReactorBase, args: Namespace) -> HathorMa ) tx_storage: TransactionStorage - rocksdb_storage: RocksDBStorage - event_storage: Optional[EventStorage] = None + event_storage: EventStorage + self.rocksdb_storage: Optional[RocksDBStorage] = None + self.event_ws_factory: Optional[EventWebsocketFactory] = None + if args.memory_storage: self.check_or_raise(not args.data, '--data should not be used with --memory-storage') # if using MemoryStorage, no need to have cache tx_storage = TransactionMemoryStorage() - if args.x_enable_event_queue: - event_storage = EventMemoryStorage() + event_storage = EventMemoryStorage() self.check_or_raise(not args.x_rocksdb_indexes, 'RocksDB indexes require RocksDB data') self.log.info('with storage', storage_class=type(tx_storage).__name__) else: @@ -102,12 +113,11 @@ def create_manager(self, reactor: PosixReactorBase, args: 
Namespace) -> HathorMa if args.rocksdb_storage: self.log.warn('--rocksdb-storage is now implied, no need to specify it') cache_capacity = args.rocksdb_cache - rocksdb_storage = RocksDBStorage(path=args.data, cache_capacity=cache_capacity) - tx_storage = TransactionRocksDBStorage(rocksdb_storage, + self.rocksdb_storage = RocksDBStorage(path=args.data, cache_capacity=cache_capacity) + tx_storage = TransactionRocksDBStorage(self.rocksdb_storage, with_index=(not args.cache), use_memory_indexes=args.memory_indexes) - if args.x_enable_event_queue: - event_storage = EventRocksDBStorage(rocksdb_storage) + event_storage = EventRocksDBStorage(self.rocksdb_storage) self.log.info('with storage', storage_class=type(tx_storage).__name__, path=args.data) if args.cache: @@ -128,28 +138,67 @@ def create_manager(self, reactor: PosixReactorBase, args: Namespace) -> HathorMa hostname = self.get_hostname(args) network = settings.NETWORK_NAME - enable_sync_v1 = not args.x_sync_v2_only + enable_sync_v1 = args.x_enable_legacy_sync_v1_0 + enable_sync_v1_1 = not args.x_sync_v2_only enable_sync_v2 = args.x_sync_v2_only or args.x_sync_bridge + pubsub = PubSubManager(reactor) + + event_manager: Optional[EventManager] = None + if args.x_enable_event_queue: + self.event_ws_factory = EventWebsocketFactory(reactor, event_storage) + event_manager = EventManager( + event_storage=event_storage, + event_ws_factory=self.event_ws_factory, + pubsub=pubsub, + reactor=reactor, + emit_load_events=args.x_emit_load_events + ) + else: + self.check_or_raise(not args.x_emit_load_events, '--x-emit-load-events cannot be used without ' + '--x-enable-event-queue') + + if args.wallet_index and tx_storage.indexes is not None: + self.log.debug('enable wallet indexes') + self.enable_wallet_index(tx_storage.indexes, pubsub) + + if args.utxo_index and tx_storage.indexes is not None: + self.log.debug('enable utxo index') + tx_storage.indexes.enable_utxo_index() + + full_verification = False + if args.x_full_verification: + 
self.check_or_raise(not args.x_enable_event_queue, '--x-full-verification cannot be used with ' + '--x-enable-event-queue') + full_verification = True + + soft_voided_tx_ids = set(settings.SOFT_VOIDED_TX_IDS) + consensus_algorithm = ConsensusAlgorithm(soft_voided_tx_ids, pubsub=pubsub) + self.manager = HathorManager( reactor, + pubsub=pubsub, peer_id=peer_id, network=network, hostname=hostname, tx_storage=tx_storage, event_storage=event_storage, + event_manager=event_manager, wallet=self.wallet, - wallet_index=args.wallet_index, - utxo_index=args.utxo_index, stratum_port=args.stratum, ssl=True, checkpoints=settings.CHECKPOINTS, enable_sync_v1=enable_sync_v1, + enable_sync_v1_1=enable_sync_v1_1, enable_sync_v2=enable_sync_v2, - soft_voided_tx_ids=set(settings.SOFT_VOIDED_TX_IDS), + consensus_algorithm=consensus_algorithm, environment_info=get_environment_info(args=str(args), peer_id=peer_id.id), + full_verification=full_verification ) + if args.data: + self.manager.set_cmd_path(args.data) + if args.allow_mining_without_peers: self.manager.allow_mining_without_peers() @@ -174,8 +223,6 @@ def create_manager(self, reactor: PosixReactorBase, args: Namespace) -> HathorMa if self.wallet: self.wallet.test_mode = True - if args.x_full_verification: - self.manager._full_verification = True if args.x_fast_init_beta: self.log.warn('--x-fast-init-beta is now the default, no need to specify it') if args.x_rocksdb_indexes: @@ -195,11 +242,6 @@ def create_manager(self, reactor: PosixReactorBase, args: Namespace) -> HathorMa self.log.info('--x-enable-event-queue flag provided. 
' 'The events detected by the full node will be stored and retrieved to clients') - self.manager.retain_events = args.x_retain_events is True - elif args.x_retain_events: - self.log.error('You cannot use --x-retain-events without --x-enable-event-queue.') - sys.exit(-1) - for description in args.listen: self.manager.add_listen_address(description) @@ -209,6 +251,11 @@ def create_manager(self, reactor: PosixReactorBase, args: Namespace) -> HathorMa return self.manager + def enable_wallet_index(self, indexes: IndexesManager, pubsub: PubSubManager) -> None: + self.log.debug('enable wallet indexes') + indexes.enable_address_index(pubsub) + indexes.enable_tokens_index() + def get_hostname(self, args: Namespace) -> str: if args.hostname and args.auto_hostname: print('You cannot use --hostname and --auto-hostname together.') @@ -465,6 +512,11 @@ def create_resources(self, args: Namespace) -> server.Site: ws_factory.subscribe(self.manager.pubsub) + # Event websocket resource + if args.x_enable_event_queue and self.event_ws_factory is not None: + root.putChild(b'event_ws', WebSocketResource(self.event_ws_factory)) + root.putChild(b'event', EventResource(self.manager._event_manager)) + # Websocket stats resource root.putChild(b'websocket_stats', WebsocketStatsResource(ws_factory)) diff --git a/hathor/builder/sysctl_builder.py b/hathor/builder/sysctl_builder.py new file mode 100644 index 000000000..a28c02fd7 --- /dev/null +++ b/hathor/builder/sysctl_builder.py @@ -0,0 +1,28 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from hathor.builder import BuildArtifacts +from hathor.sysctl import ConnectionsManagerSysctl, Sysctl + + +class SysctlBuilder: + """Builder for the sysctl tree.""" + def __init__(self, artifacts: BuildArtifacts) -> None: + self.artifacts = artifacts + + def build(self) -> Sysctl: + """Build the sysctl tree.""" + root = Sysctl() + root.put_child('p2p', ConnectionsManagerSysctl(self.artifacts.p2p_manager)) + return root diff --git a/hathor/cli/events_simulator/__init__.py b/hathor/cli/events_simulator/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/hathor/cli/events_simulator/events_simulator.py b/hathor/cli/events_simulator/events_simulator.py new file mode 100644 index 000000000..a265449a8 --- /dev/null +++ b/hathor/cli/events_simulator/events_simulator.py @@ -0,0 +1,53 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from argparse import ArgumentParser, Namespace + +DEFAULT_PORT = 8080 + + +def create_parser() -> ArgumentParser: + from hathor.cli.events_simulator.scenario import Scenario + from hathor.cli.util import create_parser + + parser = create_parser() + possible_scenarios = [scenario.value for scenario in Scenario] + + parser.add_argument('--scenario', help=f'One of {possible_scenarios}', type=Scenario, required=True) + parser.add_argument('--port', help='Port to run the WebSocket server', type=int, default=DEFAULT_PORT) + + return parser + + +def execute(args: Namespace) -> None: + from hathor.event.storage import EventMemoryStorage + from hathor.event.websocket import EventWebsocketFactory + from hathor.util import reactor + + storage = EventMemoryStorage() + + for event in args.scenario.get_events(): + storage.save_event(event) + + factory = EventWebsocketFactory(reactor, storage) + + factory.start() + reactor.listenTCP(args.port, factory) + reactor.run() + + +def main(): + parser = create_parser() + args = parser.parse_args() + execute(args) diff --git a/hathor/cli/events_simulator/scenario.py b/hathor/cli/events_simulator/scenario.py new file mode 100644 index 000000000..718b11b99 --- /dev/null +++ b/hathor/cli/events_simulator/scenario.py @@ -0,0 +1,76 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from enum import Enum + +from hathor.event.model.base_event import BaseEvent +from hathor.event.model.event_data import TxData, TxMetadata +from hathor.event.model.event_type import EventType + + +class Scenario(Enum): + SINGLE_CHAIN = 'SINGLE_CHAIN' + BEST_CHAIN_WITH_SIDE_CHAINS = 'BEST_CHAIN_WITH_SIDE_CHAINS' + MULTIPLE_FORKS = 'MULTIPLE_FORKS' + + def get_events(self): + return _SCENARIO_EVENTS[self] + + +_TRANSACTION_DATA_1 = TxData( + hash='123', + nonce=456, + timestamp=0, + version=1, + weight=2, + inputs=[], + outputs=[], + parents=[], + tokens=[], + metadata=TxMetadata( + hash='123', + spent_outputs=[], + conflict_with=[], + voided_by=[], + received_by=[], + children=[], + twins=[], + accumulated_weight=2, + score=2, + height=0, + validation='' + ) +) + +_TRANSACTION_1 = BaseEvent( + peer_id='123', + id=0, + timestamp=0, + type=EventType.NEW_VERTEX_ACCEPTED, + data=_TRANSACTION_DATA_1 +) + + +# TODO: We still have to actually populate the list of events for each scenario. Pending on design discussions. 
+_SCENARIO_EVENTS = { + Scenario.SINGLE_CHAIN: [ + _TRANSACTION_1 + ], + Scenario.BEST_CHAIN_WITH_SIDE_CHAINS: [ + _TRANSACTION_1 + ], + Scenario.MULTIPLE_FORKS: [ + _TRANSACTION_1 + ], +} diff --git a/hathor/cli/main.py b/hathor/cli/main.py index 798cfa22f..d1a81c4b9 100644 --- a/hathor/cli/main.py +++ b/hathor/cli/main.py @@ -55,6 +55,7 @@ def __init__(self) -> None: tx_generator, wallet, ) + from .events_simulator import events_simulator self.add_cmd('mining', 'run_miner', mining, 'Run a mining process (running node required)') self.add_cmd('mining', 'run_merged_mining', merged_mining, @@ -80,6 +81,7 @@ def __init__(self) -> None: self.add_cmd('dev', 'shell', shell, 'Run a Python shell') self.add_cmd('dev', 'quick_test', quick_test, 'Similar to run_node but will quit after receiving a tx') self.add_cmd('dev', 'generate_nginx_config', nginx_config, 'Generate nginx config from OpenAPI json') + self.add_cmd('dev', 'events_simulator', events_simulator, 'Simulate integration events via websocket') self.add_cmd('dev', 'x-export', db_export, 'EXPERIMENTAL: Export database to a simple format.') self.add_cmd('dev', 'x-import', db_import, 'EXPERIMENTAL: Import database from exported format.') self.add_cmd('dev', 'replay-logs', replay_logs, 'EXPERIMENTAL: re-play json logs as console printted') diff --git a/hathor/cli/openapi_files/openapi_base.json b/hathor/cli/openapi_files/openapi_base.json index e99159b61..8c367fdbf 100644 --- a/hathor/cli/openapi_files/openapi_base.json +++ b/hathor/cli/openapi_files/openapi_base.json @@ -7,7 +7,7 @@ ], "info": { "title": "Hathor API", - "version": "0.52.3" + "version": "0.53.0" }, "consumes": [ "application/json" diff --git a/hathor/cli/replay_logs.py b/hathor/cli/replay_logs.py index dccb94a71..05a19424a 100644 --- a/hathor/cli/replay_logs.py +++ b/hathor/cli/replay_logs.py @@ -27,9 +27,10 @@ def main(): help='Where to read json logs from, defaults to stdin.') parser.add_argument('output', type=argparse.FileType('w', 
encoding='UTF-8'), default=sys.stdout, nargs='?', help='Where to write pretty logs to, defaults to stdout.') + parser.add_argument('--color', action='store_true') args = parser.parse_args() - renderer = ConsoleRenderer(colors=args.output.isatty()) + renderer = ConsoleRenderer(colors=args.color or args.output.isatty()) while True: line_with_break = args.input.readline() diff --git a/hathor/cli/run_node.py b/hathor/cli/run_node.py index efa1b9574..d37fc3db6 100644 --- a/hathor/cli/run_node.py +++ b/hathor/cli/run_node.py @@ -45,6 +45,8 @@ def create_parser(cls) -> ArgumentParser: help='Reduces tx weight to 1 for testing purposes') parser.add_argument('--dns', action='append', help='Seed DNS') parser.add_argument('--peer', help='json file with peer info') + parser.add_argument('--sysctl', + help='Endpoint description (eg: unix:/path/sysctl.sock, tcp:5000:interface:127.0.0.1)') parser.add_argument('--listen', action='append', default=[], help='Address to listen for new connections (eg: tcp:8000)') parser.add_argument('--bootstrap', action='append', help='Address to connect to (eg: tcp:127.0.0.1:8000') @@ -88,6 +90,8 @@ def create_parser(cls) -> ArgumentParser: parser.add_argument('--sentry-dsn', help='Sentry DSN') parser.add_argument('--enable-debug-api', action='store_true', help='Enable _debug/* endpoints') parser.add_argument('--enable-crash-api', action='store_true', help='Enable _crash/* endpoints') + parser.add_argument('--x-enable-legacy-sync-v1_0', action='store_true', help='Enable sync-v1.0, will not ' + 'disable sync-v1.1') v2args = parser.add_mutually_exclusive_group() v2args.add_argument('--x-sync-bridge', action='store_true', help='Enable support for running both sync protocols. 
DO NOT ENABLE, IT WILL BREAK.') @@ -96,7 +100,8 @@ def create_parser(cls) -> ArgumentParser: parser.add_argument('--x-localhost-only', action='store_true', help='Only connect to peers on localhost') parser.add_argument('--x-rocksdb-indexes', action='store_true', help=SUPPRESS) parser.add_argument('--x-enable-event-queue', action='store_true', help='Enable event queue mechanism') - parser.add_argument('--x-retain-events', action='store_true', help='Retain all events in the local database') + parser.add_argument('--x-emit-load-events', action='store_true', help='Enable emission of events during the ' + 'LOAD phase') parser.add_argument('--peer-id-blacklist', action='extend', default=[], nargs='+', type=str, help='Peer IDs to forbid connection') return parser @@ -137,6 +142,25 @@ def prepare(self, args: Namespace, *, register_resources: bool = True) -> None: if register_resources: builder.register_resources(args) + from hathor.conf import HathorSettings + settings = HathorSettings() + + from hathor.builder.builder import BuildArtifacts + self.artifacts = BuildArtifacts( + peer_id=self.manager.my_peer, + settings=settings, + rng=self.manager.rng, + reactor=self.manager.reactor, + manager=self.manager, + p2p_manager=self.manager.connections, + pubsub=self.manager.pubsub, + consensus=self.manager.consensus_algorithm, + tx_storage=self.manager.tx_storage, + indexes=self.manager.tx_storage.indexes, + wallet=self.manager.wallet, + rocksdb_storage=getattr(builder, 'rocksdb_storage', None), + ) + def start_sentry_if_possible(self, args: Namespace) -> None: """Start Sentry integration if possible.""" if not args.sentry_dsn: @@ -289,6 +313,32 @@ def __init__(self, *, argv=None): os.environ['HATHOR_CONFIG_FILE'] = 'hathor.conf.testnet' self.prepare(args) self.register_signal_handlers(args) + if args.sysctl: + self.init_sysctl(args.sysctl) + + def init_sysctl(self, description: str) -> None: + """Initialize sysctl and listen for connections. 
+ + Examples of description: + - tcp:5000 + - tcp:5000:interface=127.0.0.1 + - unix:/path/sysctl.sock + - unix:/path/sysctl.sock:mode=660 + + For the full documentation, check the link below: + https://docs.twisted.org/en/stable/api/twisted.internet.endpoints.html#serverFromString + """ + from twisted.internet.endpoints import serverFromString + + from hathor.builder.sysctl_builder import SysctlBuilder + from hathor.sysctl.factory import SysctlFactory + + builder = SysctlBuilder(self.artifacts) + root = builder.build() + + factory = SysctlFactory(root) + endpoint = serverFromString(self.reactor, description) + endpoint.listen(factory) def parse_args(self, argv: List[str]) -> Namespace: return self.parser.parse_args(argv) diff --git a/hathor/cli/top.py b/hathor/cli/top.py index fa8a211ee..0d75f09bd 100644 --- a/hathor/cli/top.py +++ b/hathor/cli/top.py @@ -669,7 +669,7 @@ def __init__(self, loop: AbstractEventLoop, base_url: str, *, update_interval: i self.error: str = '' self.error_count: int = 0 - self.latest_data = None + self.latest_data: Optional[ProfileData] = None self.update_interval = update_interval self.task = None @@ -692,7 +692,7 @@ async def send_reset_cmd(self): data = await resp.json() return data - async def run(self): + async def run(self) -> Any: while True: try: data_dict: Dict[str, Any] = await self.fetch() @@ -716,7 +716,7 @@ async def fetch(self): return data class DefaultColor: - def __init__(self): + def __init__(self) -> None: self._color_map: Dict[Tuple[int, int], int] = {} A_NONE = 0 diff --git a/hathor/cli/tx_generator.py b/hathor/cli/tx_generator.py index 1561834ed..f96ed2272 100644 --- a/hathor/cli/tx_generator.py +++ b/hathor/cli/tx_generator.py @@ -17,7 +17,7 @@ import signal import sys import time -from argparse import ArgumentParser +from argparse import ArgumentParser, Namespace from json.decoder import JSONDecodeError from typing import Any, Dict @@ -43,7 +43,7 @@ def create_parser() -> ArgumentParser: return parser -def 
execute(args): +def execute(args: Namespace) -> None: import urllib.parse from requests.exceptions import ConnectionError @@ -68,6 +68,7 @@ def execute(args): addresses = args.address else: address_url = urllib.parse.urljoin(args.url, 'wallet/address') + '?new=false' + response = None while True: try: response = requests.get(address_url) @@ -86,6 +87,7 @@ def execute(args): continue else: conn_retries = 0 + assert response is not None addresses = [response.json()['address']] print('Addresses: {}'.format(addresses)) diff --git a/hathor/client.py b/hathor/client.py index 40fa7dfce..737f7d3e9 100644 --- a/hathor/client.py +++ b/hathor/client.py @@ -95,7 +95,7 @@ class HathorClient(IHathorClient): USER_AGENT = 'hathor-merged-mining' - def __init__(self, server_url, api_version=REQUIRED_HATHOR_API_VERSION): + def __init__(self, server_url: str, api_version: str = REQUIRED_HATHOR_API_VERSION) -> None: server_url = server_url.rstrip('/') + '/' if not (server_url.startswith('http://') or server_url.startswith('https://')): server_url = 'http://' + server_url diff --git a/hathor/conf/get_settings.py b/hathor/conf/get_settings.py index df7ebeffb..69b040c93 100644 --- a/hathor/conf/get_settings.py +++ b/hathor/conf/get_settings.py @@ -15,10 +15,11 @@ import importlib import os from types import ModuleType +from typing import Optional from hathor.conf.settings import HathorSettings as Settings -_config_file = None +_config_file: Optional[str] = None def HathorSettings() -> Settings: diff --git a/hathor/conf/mainnet.py b/hathor/conf/mainnet.py index 67f418c0a..a9426dbb9 100644 --- a/hathor/conf/mainnet.py +++ b/hathor/conf/mainnet.py @@ -62,6 +62,11 @@ cp(2_700_000, bytes.fromhex('00000000000000000cf3a35ab01a2281024ca4ca7871f5a6d67106eb36151038')), cp(2_800_000, bytes.fromhex('000000000000000004439733fd419a8a747e8afe2f89348a17c1fac24538a63c')), cp(2_900_000, bytes.fromhex('0000000000000000090cbd5a7958c82a2b969103001d92334f287dadcf3e01bc')), + cp(3_000_000, 
bytes.fromhex('000000000000000013c9086f4ce441f5db5de55a5e235f4f7f1ef223aedfe2db')), + cp(3_100_000, bytes.fromhex('00000000000000000d226a5998ffc65af89b1226126b1af1f8d0712a5301c775')), + cp(3_200_000, bytes.fromhex('0000000000000000028d9629d85d93d0f5e798a498ca7b1710ffc157fa045cd5')), + cp(3_300_000, bytes.fromhex('0000000000000000065b74441acb3d2ff770d384b2bad44c9823f26a0327690c')), + cp(3_400_000, bytes.fromhex('000000000000000077242c961a0c6f708bc671a8372eb8b095311f091fddc6c3')), ], SOFT_VOIDED_TX_IDS=list(map(bytes.fromhex, [ '0000000012a922a6887497bed9c41e5ed7dc7213cae107db295602168266cd02', diff --git a/hathor/conf/settings.py b/hathor/conf/settings.py index 34ebd6d3e..c9fd671df 100644 --- a/hathor/conf/settings.py +++ b/hathor/conf/settings.py @@ -369,3 +369,13 @@ def MAXIMUM_NUMBER_OF_HALVINGS(self) -> int: CONSENSUS_FAIL_ID: bytes = b'consensus-fail' ENABLE_EVENT_QUEUE_FEATURE: bool = False + + EVENT_API_DEFAULT_BATCH_SIZE: int = 100 + + EVENT_API_MAX_BATCH_SIZE: int = 1000 + + # Maximum number of sync running simultaneously. + MAX_ENABLED_SYNC: int = 16 + + # Time to update the peers that are running sync. + SYNC_UPDATE_INTERVAL: int = 10 * 60 # seconds diff --git a/hathor/conf/unittests.py b/hathor/conf/unittests.py index df85ae851..862ea7a23 100644 --- a/hathor/conf/unittests.py +++ b/hathor/conf/unittests.py @@ -33,5 +33,6 @@ GENESIS_TX2_HASH=bytes.fromhex('33e14cb555a96967841dcbe0f95e9eab5810481d01de8f4f73afb8cce365e869'), REWARD_SPEND_MIN_BLOCKS=10, SLOW_ASSERTS=True, + ENABLE_EVENT_QUEUE_FEATURE=True, MAX_TX_WEIGHT_DIFF_ACTIVATION=0.0, ) diff --git a/hathor/consensus.py b/hathor/consensus.py deleted file mode 100644 index 8c3691bcb..000000000 --- a/hathor/consensus.py +++ /dev/null @@ -1,1067 +0,0 @@ -# Copyright 2021 Hathor Labs -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from itertools import chain -from typing import Iterable, List, Optional, Set, cast - -from structlog import get_logger - -from hathor.conf import HathorSettings -from hathor.profiler import get_cpu_profiler -from hathor.pubsub import HathorEvents, PubSubManager -from hathor.transaction import BaseTransaction, Block, Transaction, TxInput, sum_weights -from hathor.util import classproperty, not_none - -logger = get_logger() -settings = HathorSettings() -cpu = get_cpu_profiler() - -_base_transaction_log = logger.new() - - -class ConsensusAlgorithmContext: - """ An instance of this class holds all the relevant information related to a single run of a consensus update. - """ - - consensus: 'ConsensusAlgorithm' - block_algorithm: 'BlockConsensusAlgorithm' - transaction_algorithm: 'TransactionConsensusAlgorithm' - txs_affected: Set[BaseTransaction] - - def __init__(self, consensus: 'ConsensusAlgorithm') -> None: - self.consensus = consensus - self.block_algorithm = consensus.block_algorithm_factory(self) - self.transaction_algorithm = consensus.transaction_algorithm_factory(self) - self.txs_affected = set() - - def save(self, tx: BaseTransaction) -> None: - """Only metadata is ever saved in a consensus update.""" - assert tx.storage is not None - self.txs_affected.add(tx) - tx.storage.save_transaction(tx, only_metadata=True) - - -class ConsensusAlgorithm: - """Execute the consensus algorithm marking blocks and transactions as either executed or voided. - - The consensus algorithm uses the metadata voided_by to set whether a block or transaction is executed. 
- If voided_by is empty, then the block or transaction is executed. Otherwise, it is voided. - - The voided_by stores which hashes are causing the voidance. The hashes may be from both blocks and - transactions. - - The voidance propagates through the DAG of transactions. For example, if tx1 is voided and tx2 verifies - tx1, then tx2 must be voided as well. Another example is that, if a block is not in the bestchain, - any transaction spending one of the block's outputs is also voided. - - In the DAG of transactions, the voided_by of tx1 is always a subset of the voided_by of all transactions - that verifies tx1 or spend one of tx1's outputs. The hash of tx1 may only be on its own voided_by when - tx1 has conflicts and is not the winner. - - When a block is not in the bestchain, its voided_by contains its hash. This hash is also propagated - through the transactions that spend one of its outputs. - - Differently from transactions, the hash of the blocks are not propagated through the voided_by of - other blocks. For example, if b0 <- b1 <- b2 <- b3 is a side chain, i.e., not the best blockchain, - then b0's voided_by contains b0's hash, b1's voided_by contains b1's hash, and so on. The hash of - b0 will not be propagated to the voided_by of b1, b2, and b3. 
- """ - - def __init__(self, soft_voided_tx_ids: Set[bytes], pubsub: PubSubManager) -> None: - self.pubsub = pubsub - self.log = logger.new() - self.soft_voided_tx_ids = frozenset(soft_voided_tx_ids) - self.block_algorithm_factory = BlockConsensusAlgorithmFactory() - self.transaction_algorithm_factory = TransactionConsensusAlgorithmFactory() - - def create_context(self) -> ConsensusAlgorithmContext: - """Handy method to create a context that can be used to access block and transaction algorithms.""" - return ConsensusAlgorithmContext(self) - - @cpu.profiler(key=lambda self, base: 'consensus!{}'.format(base.hash.hex())) - def update(self, base: BaseTransaction) -> None: - try: - self._unsafe_update(base) - except Exception: - meta = base.get_metadata() - meta.add_voided_by(settings.CONSENSUS_FAIL_ID) - assert base.storage is not None - base.storage.save_transaction(base, only_metadata=True) - raise - - def _unsafe_update(self, base: BaseTransaction) -> None: - """Run a consensus update with its own context, indexes will be updated accordingly.""" - from hathor.transaction import Block, Transaction - - # this context instance will live only while this update is running - context = self.create_context() - - assert base.storage is not None - storage = base.storage - assert storage.indexes is not None - best_height, best_tip = storage.indexes.height.get_height_tip() - - if isinstance(base, Transaction): - context.transaction_algorithm.update_consensus(base) - elif isinstance(base, Block): - context.block_algorithm.update_consensus(base) - else: - raise NotImplementedError - - new_best_height, new_best_tip = storage.indexes.height.get_height_tip() - if new_best_height < best_height: - self.log.warn('height decreased, re-checking mempool', prev_height=best_height, new_height=new_best_height, - prev_block_tip=best_tip.hex(), new_block_tip=new_best_tip.hex()) - to_remove = storage.get_transactions_that_became_invalid() - if to_remove: - self.log.warn('some transactions on 
the mempool became invalid and will be removed', - count=len(to_remove)) - storage.remove_transactions(to_remove) - for tx_removed in to_remove: - self.pubsub.publish(HathorEvents.CONSENSUS_TX_REMOVED, tx_hash=tx_removed.hash) - - # finally signal an index update for all affected transactions - for tx_affected in context.txs_affected: - assert tx_affected.storage is not None - assert tx_affected.storage.indexes is not None - tx_affected.storage.indexes.update(tx_affected) - self.pubsub.publish(HathorEvents.CONSENSUS_TX_UPDATE, tx=tx_affected) - - def filter_out_soft_voided_entries(self, tx: BaseTransaction, voided_by: Set[bytes]) -> Set[bytes]: - if not (self.soft_voided_tx_ids & voided_by): - return voided_by - ret = set() - for h in voided_by: - if h == settings.SOFT_VOIDED_ID: - continue - if h == tx.hash: - continue - if h in self.soft_voided_tx_ids: - continue - assert tx.storage is not None - tx3 = tx.storage.get_transaction(h) - tx3_meta = tx3.get_metadata() - tx3_voided_by: Set[bytes] = tx3_meta.voided_by or set() - if not (self.soft_voided_tx_ids & tx3_voided_by): - ret.add(h) - return ret - - -class BlockConsensusAlgorithm: - """Implement the consensus algorithm for blocks.""" - - def __init__(self, context: ConsensusAlgorithmContext) -> None: - self.context = context - - @classproperty - def log(cls): - """ This is a workaround because of a bug on structlog (or abc). - - See: https://github.com/hynek/structlog/issues/229 - """ - return _base_transaction_log - - def update_consensus(self, block: Block) -> None: - self.update_voided_info(block) - - def update_voided_info(self, block: Block) -> None: - """ This method is called only once when a new block arrives. - - The blockchain part of the DAG is a tree with the genesis block as the root. - I'll say the a block A is connected to a block B when A verifies B, i.e., B is a parent of A. 
- - A chain is a sequence of connected blocks starting in a leaf and ending in the root, i.e., any path from a leaf - to the root is a chain. Given a chain, its head is a leaf in the tree, and its tail is the sub-chain without - the head. - - The best chain is a chain that has the highest score of all chains. - - The score of a block is calculated as the sum of the weights of all transactions and blocks both direcly and - indirectly verified by the block. The score of a chain is defined as the score of its head. - - The side chains are the chains whose scores are smaller than the best chain's. - The head of the side chains are always voided blocks. - - There are two possible states for the block chain: - (i) It has a single best chain, i.e., one chain has the highest score - (ii) It has multiple best chains, i.e., two or more chains have the same score (and this score is the highest - among the chains) - - When there are multiple best chains, I'll call them best chain candidates. - - The arrived block can be connected in four possible ways: - (i) To the head of a best chain - (ii) To the tail of the best chain - (iii) To the head of a side chain - (iv) To the tail of a side chain - - Thus, there are eight cases to be handled when a new block arrives, which are: - (i) Single best chain, connected to the head of the best chain - (ii) Single best chain, connected to the tail of the best chain - (iii) Single best chain, connected to the head of a side chain - (iv) Single best chain, connected to the tail of a side chain - (v) Multiple best chains, connected to the head of a best chain - (vi) Multiple best chains, connected to the tail of a best chain - (vii) Multiple best chains, connected to the head of a side chain - (viii) Multiple best chains, connected to the tail of a side chain - - Case (i) is trivial because the single best chain will remain as the best chain. So, just calculate the new - score and that's it. - - Case (v) is also trivial. 
As there are multiple best chains and the new block is connected to the head of one - of them, this will be the new winner. So, the blockchain state will change to a single best chain again. - - In the other cases, we must calculate the score and compare with the best score. - - When there are multiple best chains, all their heads will be voided. - """ - assert block.weight > 0, 'This algorithm assumes that block\'s weight is always greater than zero' - if not block.parents: - assert block.is_genesis is True - self.update_score_and_mark_as_the_best_chain(block) - return - - assert block.storage is not None - assert block.hash is not None - - storage = block.storage - assert storage.indexes is not None - - # Union of voided_by of parents - voided_by: Set[bytes] = self.union_voided_by_from_parents(block) - - # Update accumulated weight of the transactions voiding us. - assert block.hash not in voided_by - for h in voided_by: - tx = storage.get_transaction(h) - tx_meta = tx.get_metadata() - tx_meta.accumulated_weight = sum_weights(tx_meta.accumulated_weight, block.weight) - self.context.save(tx) - - # Check conflicts of the transactions voiding us. - for h in voided_by: - tx = storage.get_transaction(h) - if not tx.is_block: - assert isinstance(tx, Transaction) - self.context.transaction_algorithm.check_conflicts(tx) - - parent = block.get_block_parent() - parent_meta = parent.get_metadata() - assert block.hash in parent_meta.children - - # This method is called after the metadata of the parent is updated. - # So, if the parent has only one child, it must be the current block. 
- is_connected_to_the_head = bool(len(parent_meta.children) == 1) - is_connected_to_the_best_chain = bool(not parent_meta.voided_by) - - if is_connected_to_the_head and is_connected_to_the_best_chain: - # Case (i): Single best chain, connected to the head of the best chain - self.update_score_and_mark_as_the_best_chain_if_possible(block) - # As `update_score_and_mark_as_the_best_chain_if_possible` may affect `voided_by`, - # we need to check that block is not voided. - meta = block.get_metadata() - if not meta.voided_by: - storage.indexes.height.add_new(meta.height, block.hash, block.timestamp) - storage.update_best_block_tips_cache([block.hash]) - # The following assert must be true, but it is commented out for performance reasons. - if settings.SLOW_ASSERTS: - assert len(storage.get_best_block_tips(skip_cache=True)) == 1 - else: - # Resolve all other cases, but (i). - log = self.log.new(block=block.hash_hex) - log.debug('this block is not the head of the bestchain', - is_connected_to_the_head=is_connected_to_the_head, - is_connected_to_the_best_chain=is_connected_to_the_best_chain) - - # First, void this block. - self.mark_as_voided(block, skip_remove_first_block_markers=True) - - # Get the score of the best chains. - # We need to void this block first, because otherwise it would always be one of the heads. - heads = [cast(Block, storage.get_transaction(h)) for h in storage.get_best_block_tips()] - best_score = None - for head in heads: - head_meta = head.get_metadata(force_reload=True) - if best_score is None: - best_score = head_meta.score - else: - # All heads must have the same score. - assert abs(best_score - head_meta.score) < 1e-10 - assert isinstance(best_score, (int, float)) - - # Calculate the score. - # We cannot calculate score before getting the heads. - score = self.calculate_score(block) - - # Finally, check who the winner is. - if score <= best_score - settings.WEIGHT_TOL: - # Just update voided_by from parents. 
- self.update_voided_by_from_parents(block) - - else: - # Either eveyone has the same score or there is a winner. - - valid_heads = [] - for head in heads: - meta = head.get_metadata() - if not meta.voided_by: - valid_heads.append(head) - - # We must have at most one valid head. - # Either we have a single best chain or all chains have already been voided. - assert len(valid_heads) <= 1, 'We must never have more than one valid head' - - # Add voided_by to all heads. - self.add_voided_by_to_multiple_chains(block, heads) - - if score >= best_score + settings.WEIGHT_TOL: - # We have a new winner candidate. - self.update_score_and_mark_as_the_best_chain_if_possible(block) - # As `update_score_and_mark_as_the_best_chain_if_possible` may affect `voided_by`, - # we need to check that block is not voided. - meta = block.get_metadata() - if not meta.voided_by: - self.log.debug('index new winner block', height=meta.height, block=block.hash_hex) - # We update the height cache index with the new winner chain - storage.indexes.height.update_new_chain(meta.height, block) - storage.update_best_block_tips_cache([block.hash]) - else: - storage.update_best_block_tips_cache([not_none(blk.hash) for blk in heads]) - - def union_voided_by_from_parents(self, block: Block) -> Set[bytes]: - """Return the union of the voided_by of block's parents. - - It does not include the hash of blocks because the hash of blocks - are not propagated through the chains. For further information, see - the docstring of the ConsensusAlgorithm class. - """ - voided_by: Set[bytes] = set() - for parent in block.get_parents(): - assert parent.hash is not None - parent_meta = parent.get_metadata() - voided_by2 = parent_meta.voided_by - if voided_by2: - if parent.is_block: - # We must go through the blocks because the voidance caused - # by a transaction must be sent ahead. For example, in the - # chain b0 <- b1 <- b2 <- b3, if a transaction voids b1, then - # it must also voids b2 and b3. 
But, we must ignore the hash of - # the blocks themselves. - voided_by2 = voided_by2.copy() - voided_by2.discard(parent.hash) - voided_by.update(self.context.consensus.filter_out_soft_voided_entries(parent, voided_by2)) - return voided_by - - def update_voided_by_from_parents(self, block: Block) -> bool: - """Update block's metadata voided_by from parents. - Return True if the block is voided and False otherwise.""" - assert block.storage is not None - voided_by: Set[bytes] = self.union_voided_by_from_parents(block) - if voided_by: - meta = block.get_metadata() - if meta.voided_by: - meta.voided_by.update(voided_by) - else: - meta.voided_by = voided_by.copy() - self.context.save(block) - block.storage.del_from_indexes(block, relax_assert=True) - return True - return False - - def add_voided_by_to_multiple_chains(self, block: Block, heads: List[Block]) -> None: - # We need to go through all side chains because there may be non-voided blocks - # that must be voided. - # For instance, imagine two chains with intersection with both heads voided. - # Now, a new chain starting in genesis reaches the same score. Then, the tail - # of the two chains must be voided. - first_block = self._find_first_parent_in_best_chain(block) - for head in heads: - while True: - if head.timestamp <= first_block.timestamp: - break - meta = head.get_metadata() - if not (meta.voided_by and head.hash in meta.voided_by): - # Only mark as voided when it is non-voided. - self.mark_as_voided(head) - # We have to go through the chain until the first parent in the best - # chain because the head may be voided with part of the tail non-voided. - head = head.get_block_parent() - - def update_score_and_mark_as_the_best_chain_if_possible(self, block: Block) -> None: - """Update block's score and mark it as best chain if it is a valid consensus. - If it is not, the block will be voided and the block with highest score will be set as - best chain. 
- """ - assert block.storage is not None - self.update_score_and_mark_as_the_best_chain(block) - self.remove_voided_by_from_chain(block) - - if self.update_voided_by_from_parents(block): - storage = block.storage - heads = [cast(Block, storage.get_transaction(h)) for h in storage.get_best_block_tips()] - best_score = 0.0 - best_heads: List[Block] - for head in heads: - head_meta = head.get_metadata(force_reload=True) - if head_meta.score <= best_score - settings.WEIGHT_TOL: - continue - - if head_meta.score >= best_score + settings.WEIGHT_TOL: - best_heads = [head] - best_score = head_meta.score - else: - assert abs(best_score - head_meta.score) < 1e-10 - best_heads.append(head) - assert isinstance(best_score, (int, float)) and best_score > 0 - - assert len(best_heads) > 0 - self.add_voided_by_to_multiple_chains(best_heads[0], [block]) - if len(best_heads) == 1: - self.update_score_and_mark_as_the_best_chain_if_possible(best_heads[0]) - - def update_score_and_mark_as_the_best_chain(self, block: Block) -> None: - """ Update score and mark the chain as the best chain. - Thus, transactions' first_block will point to the blocks in the chain. - """ - self.calculate_score(block, mark_as_best_chain=True) - - def remove_voided_by_from_chain(self, block: Block) -> None: - """ Remove voided_by from the chain. Now, it is the best chain. - - The blocks are visited from right to left (most recent to least recent). - """ - while True: - assert block.is_block - success = self.remove_voided_by(block) - if not success: - break - block = block.get_block_parent() - - def _find_first_parent_in_best_chain(self, block: Block) -> BaseTransaction: - """ Find the first block in the side chain that is not voided, i.e., the block where the fork started. - - In the simple schema below, the best chain's blocks are O's, the side chain's blocks are I's, and the first - valid block is the [O]. 
- - O-O-O-O-[O]-O-O-O-O - | - +-I-I-I - """ - assert block.storage is not None - storage = block.storage - - assert len(block.parents) > 0, 'This should never happen because the genesis is always in the best chain' - parent_hash = block.get_block_parent_hash() - while True: - parent = storage.get_transaction(parent_hash) - assert isinstance(parent, Block) - parent_meta = parent.get_metadata() - if not parent_meta.voided_by: - break - assert len(parent.parents) > 0, 'This should never happen because the genesis is always in the best chain' - parent_hash = parent.get_block_parent_hash() - return parent - - def mark_as_voided(self, block: Block, *, skip_remove_first_block_markers: bool = False) -> None: - """ Mark a block as voided. By default, it will remove the first block markers from - `meta.first_block` of the transactions that point to it. - """ - self.log.debug('block.mark_as_voided', block=block.hash_hex) - if not skip_remove_first_block_markers: - self.remove_first_block_markers(block) - self.add_voided_by(block) - - def add_voided_by(self, block: Block, voided_hash: Optional[bytes] = None) -> bool: - """ Add a new hash in its `meta.voided_by`. If `voided_hash` is None, it includes - the block's own hash. 
- """ - assert block.storage is not None - assert block.hash is not None - - storage = block.storage - - if voided_hash is None: - voided_hash = block.hash - assert voided_hash is not None - - meta = block.get_metadata() - if not meta.voided_by: - meta.voided_by = set() - if voided_hash in meta.voided_by: - return False - - self.log.debug('add_voided_by', block=block.hash_hex, voided_hash=voided_hash.hex()) - - meta.voided_by.add(voided_hash) - self.context.save(block) - - spent_by: Iterable[bytes] = chain(*meta.spent_outputs.values()) - for tx_hash in spent_by: - tx = storage.get_transaction(tx_hash) - assert isinstance(tx, Transaction) - self.context.transaction_algorithm.add_voided_by(tx, voided_hash) - return True - - def remove_voided_by(self, block: Block, voided_hash: Optional[bytes] = None) -> bool: - """ Remove a hash from its `meta.voided_by`. If `voided_hash` is None, it removes - the block's own hash. - """ - assert block.storage is not None - assert block.hash is not None - - storage = block.storage - - if voided_hash is None: - voided_hash = block.hash - - meta = block.get_metadata() - if not meta.voided_by: - return False - if voided_hash not in meta.voided_by: - return False - - self.log.debug('remove_voided_by', block=block.hash_hex, voided_hash=voided_hash.hex()) - - meta.voided_by.remove(voided_hash) - if not meta.voided_by: - meta.voided_by = None - self.context.save(block) - - spent_by: Iterable[bytes] = chain(*meta.spent_outputs.values()) - for tx_hash in spent_by: - tx = storage.get_transaction(tx_hash) - assert isinstance(tx, Transaction) - self.context.transaction_algorithm.remove_voided_by(tx, voided_hash) - return True - - def remove_first_block_markers(self, block: Block) -> None: - """ Remove all `meta.first_block` pointing to this block. 
- """ - assert block.storage is not None - storage = block.storage - - from hathor.transaction.storage.traversal import BFSWalk - bfs = BFSWalk(storage, is_dag_verifications=True, is_left_to_right=False) - for tx in bfs.run(block, skip_root=True): - if tx.is_block: - bfs.skip_neighbors(tx) - continue - - meta = tx.get_metadata() - if meta.first_block != block.hash: - bfs.skip_neighbors(tx) - continue - - meta.first_block = None - self.context.save(tx) - - def _score_block_dfs(self, block: BaseTransaction, used: Set[bytes], - mark_as_best_chain: bool, newest_timestamp: int) -> float: - """ Internal method to run a DFS. It is used by `calculate_score()`. - """ - assert block.storage is not None - assert block.hash is not None - assert block.is_block - - storage = block.storage - - from hathor.transaction import Block - score = block.weight - for parent in block.get_parents(): - if parent.is_block: - assert isinstance(parent, Block) - if parent.timestamp <= newest_timestamp: - meta = parent.get_metadata() - x = meta.score - else: - x = self._score_block_dfs(parent, used, mark_as_best_chain, newest_timestamp) - score = sum_weights(score, x) - - else: - from hathor.transaction.storage.traversal import BFSWalk - bfs = BFSWalk(storage, is_dag_verifications=True, is_left_to_right=False) - for tx in bfs.run(parent, skip_root=False): - assert tx.hash is not None - assert not tx.is_block - - if tx.hash in used: - bfs.skip_neighbors(tx) - continue - used.add(tx.hash) - - meta = tx.get_metadata() - if meta.first_block: - first_block = storage.get_transaction(meta.first_block) - if first_block.timestamp <= newest_timestamp: - bfs.skip_neighbors(tx) - continue - - if mark_as_best_chain: - assert meta.first_block is None - meta.first_block = block.hash - self.context.save(tx) - - score = sum_weights(score, tx.weight) - - # Always save the score when it is calculated. 
- meta = block.get_metadata() - if not meta.score: - meta.score = score - self.context.save(block) - else: - # The score of a block is immutable since the sub-DAG behind it is immutable as well. - # Thus, if we have already calculated it, we just check the consistency of the calculation. - # Unfortunately we may have to calculate it more than once when a new block arrives in a side - # side because the `first_block` points only to the best chain. - assert abs(meta.score - score) < 1e-10, \ - 'hash={} meta.score={} score={}'.format(block.hash.hex(), meta.score, score) - - return score - - def calculate_score(self, block: Block, *, mark_as_best_chain: bool = False) -> float: - """ Calculate block's score, which is the accumulated work of the verified transactions and blocks. - - :param: mark_as_best_chain: If `True`, the transactions' will point `meta.first_block` to - the blocks of the chain. - """ - assert block.storage is not None - if block.is_genesis: - if mark_as_best_chain: - meta = block.get_metadata() - meta.score = block.weight - self.context.save(block) - return block.weight - - parent = self._find_first_parent_in_best_chain(block) - newest_timestamp = parent.timestamp - - used: Set[bytes] = set() - return self._score_block_dfs(block, used, mark_as_best_chain, newest_timestamp) - - -class BlockConsensusAlgorithmFactory: - def __call__(self, context: ConsensusAlgorithmContext) -> BlockConsensusAlgorithm: - return BlockConsensusAlgorithm(context) - - -class TransactionConsensusAlgorithm: - """Implement the consensus algorithm for transactions.""" - - def __init__(self, context: ConsensusAlgorithmContext) -> None: - self.context = context - - @classproperty - def log(cls): - """ This is a workaround because of a bug on structlog (or abc). 
- - See: https://github.com/hynek/structlog/issues/229 - """ - return _base_transaction_log - - def update_consensus(self, tx: Transaction) -> None: - self.mark_inputs_as_used(tx) - self.update_voided_info(tx) - self.set_conflict_twins(tx) - - def mark_inputs_as_used(self, tx: Transaction) -> None: - """ Mark all its inputs as used - """ - for txin in tx.inputs: - self.mark_input_as_used(tx, txin) - - def mark_input_as_used(self, tx: Transaction, txin: TxInput) -> None: - """ Mark a given input as used - """ - assert tx.hash is not None - assert tx.storage is not None - - spent_tx = tx.storage.get_transaction(txin.tx_id) - spent_meta = spent_tx.get_metadata() - spent_by = spent_meta.spent_outputs[txin.index] - assert tx.hash not in spent_by - - # Update our meta.conflict_with. - meta = tx.get_metadata() - if spent_by: - # We initially void ourselves. This conflict will be resolved later. - if not meta.voided_by: - meta.voided_by = {tx.hash} - else: - meta.voided_by.add(tx.hash) - if meta.conflict_with: - meta.conflict_with.extend(set(spent_by) - set(meta.conflict_with)) - else: - meta.conflict_with = spent_by.copy() - self.context.save(tx) - - for h in spent_by: - # Update meta.conflict_with of our conflict transactions. - conflict_tx = tx.storage.get_transaction(h) - tx_meta = conflict_tx.get_metadata() - if tx_meta.conflict_with: - if tx.hash not in tx_meta.conflict_with: - # We could use a set instead of a list but it consumes ~2.15 times more of memory. - tx_meta.conflict_with.append(tx.hash) - else: - tx_meta.conflict_with = [tx.hash] - self.context.save(conflict_tx) - - # Add ourselves to meta.spent_by of our input. 
- spent_by.append(tx.hash) - self.context.save(spent_tx) - - def set_conflict_twins(self, tx: Transaction) -> None: - """ Get all transactions that conflict with self - and check if they are also a twin of self - """ - assert tx.storage is not None - - meta = tx.get_metadata() - if not meta.conflict_with: - return - - conflict_txs = [tx.storage.get_transaction(h) for h in meta.conflict_with] - self.check_twins(tx, conflict_txs) - - def check_twins(self, tx: Transaction, transactions: Iterable[BaseTransaction]) -> None: - """ Check if the tx has any twins in transactions list - A twin tx is a tx that has the same inputs and outputs - We add all the hashes of the twin txs in the metadata - - :param transactions: list of transactions to be checked if they are twins with self - """ - assert tx.hash is not None - assert tx.storage is not None - - # Getting tx metadata to save the new twins - meta = tx.get_metadata() - - # Sorting inputs and outputs for easier validation - sorted_inputs = sorted(tx.inputs, key=lambda x: (x.tx_id, x.index, x.data)) - sorted_outputs = sorted(tx.outputs, key=lambda x: (x.script, x.value)) - - for candidate in transactions: - assert candidate.hash is not None - - # If quantity of inputs is different, it's not a twin. - if len(candidate.inputs) != len(tx.inputs): - continue - - # If quantity of outputs is different, it's not a twin. - if len(candidate.outputs) != len(tx.outputs): - continue - - # If the hash is the same, it's not a twin. 
- if candidate.hash == tx.hash: - continue - - # Verify if all the inputs are the same - equal = True - for index, tx_input in enumerate(sorted(candidate.inputs, key=lambda x: (x.tx_id, x.index, x.data))): - if (tx_input.tx_id != sorted_inputs[index].tx_id or tx_input.data != sorted_inputs[index].data - or tx_input.index != sorted_inputs[index].index): - equal = False - break - - # Verify if all the outputs are the same - if equal: - for index, tx_output in enumerate(sorted(candidate.outputs, key=lambda x: (x.script, x.value))): - if (tx_output.value != sorted_outputs[index].value - or tx_output.script != sorted_outputs[index].script): - equal = False - break - - # If everything is equal we add in both metadatas - if equal: - meta.twins.append(candidate.hash) - tx_meta = candidate.get_metadata() - tx_meta.twins.append(tx.hash) - self.context.save(candidate) - - self.context.save(tx) - - def update_voided_info(self, tx: Transaction) -> None: - """ This method should be called only once when the transactions is added to the DAG. - """ - assert tx.hash is not None - assert tx.storage is not None - - voided_by: Set[bytes] = set() - - # Union of voided_by of parents - for parent in tx.get_parents(): - parent_meta = parent.get_metadata() - if parent_meta.voided_by: - voided_by.update(self.context.consensus.filter_out_soft_voided_entries(parent, parent_meta.voided_by)) - assert settings.SOFT_VOIDED_ID not in voided_by - assert not (self.context.consensus.soft_voided_tx_ids & voided_by) - - # Union of voided_by of inputs - for txin in tx.inputs: - spent_tx = tx.storage.get_transaction(txin.tx_id) - spent_meta = spent_tx.get_metadata() - if spent_meta.voided_by: - voided_by.update(spent_meta.voided_by) - voided_by.discard(settings.SOFT_VOIDED_ID) - assert settings.SOFT_VOIDED_ID not in voided_by - - # Update accumulated weight of the transactions voiding us. 
- assert tx.hash not in voided_by - for h in voided_by: - if h == settings.SOFT_VOIDED_ID: - continue - tx2 = tx.storage.get_transaction(h) - tx2_meta = tx2.get_metadata() - tx2_meta.accumulated_weight = sum_weights(tx2_meta.accumulated_weight, tx.weight) - self.context.save(tx2) - - # Then, we add ourselves. - meta = tx.get_metadata() - assert not meta.voided_by or meta.voided_by == {tx.hash} - assert meta.accumulated_weight == tx.weight - if tx.hash in self.context.consensus.soft_voided_tx_ids: - voided_by.add(settings.SOFT_VOIDED_ID) - voided_by.add(tx.hash) - if meta.conflict_with: - voided_by.add(tx.hash) - - # We must save before marking conflicts as voided because - # the conflicting tx might affect this tx's voided_by metadata. - if voided_by: - meta.voided_by = voided_by.copy() - self.context.save(tx) - tx.storage.del_from_indexes(tx) - - # Check conflicts of the transactions voiding us. - for h in voided_by: - if h == settings.SOFT_VOIDED_ID: - continue - if h == tx.hash: - continue - tx2 = tx.storage.get_transaction(h) - if not tx2.is_block: - assert isinstance(tx2, Transaction) - self.check_conflicts(tx2) - - # Mark voided conflicts as voided. - for h in meta.conflict_with or []: - conflict_tx = cast(Transaction, tx.storage.get_transaction(h)) - conflict_tx_meta = conflict_tx.get_metadata() - if conflict_tx_meta.voided_by: - self.mark_as_voided(conflict_tx) - - # Finally, check our conflicts. - meta = tx.get_metadata() - if meta.voided_by == {tx.hash}: - self.check_conflicts(tx) - - # Assert the final state is valid. 
- self.assert_valid_consensus(tx) - - def assert_valid_consensus(self, tx: BaseTransaction) -> None: - """Assert the conflict resolution is valid.""" - meta = tx.get_metadata() - is_tx_executed = bool(not meta.voided_by) - for h in meta.conflict_with or []: - assert tx.storage is not None - conflict_tx = cast(Transaction, tx.storage.get_transaction(h)) - conflict_tx_meta = conflict_tx.get_metadata() - is_conflict_tx_executed = bool(not conflict_tx_meta.voided_by) - assert not (is_tx_executed and is_conflict_tx_executed) - - def check_conflicts(self, tx: Transaction) -> None: - """ Check which transaction is the winner of a conflict, the remaining are voided. - - The verification is made for each input, and `self` is only marked as winner if it - wins in all its inputs. - """ - assert tx.hash is not None - assert tx.storage is not None - self.log.debug('tx.check_conflicts', tx=tx.hash_hex) - - meta = tx.get_metadata() - if meta.voided_by != {tx.hash}: - return - - # Filter the possible candidates to compare to tx. - candidates: List[Transaction] = [] - conflict_list: List[Transaction] = [] - for h in meta.conflict_with or []: - conflict_tx = cast(Transaction, tx.storage.get_transaction(h)) - conflict_list.append(conflict_tx) - conflict_tx_meta = conflict_tx.get_metadata() - if not conflict_tx_meta.voided_by or conflict_tx_meta.voided_by == {conflict_tx.hash}: - candidates.append(conflict_tx) - - # Check whether we have the highest accumulated weight. - # First with the voided transactions. - is_highest = True - for candidate in candidates: - tx_meta = candidate.get_metadata() - if tx_meta.voided_by: - if tx_meta.accumulated_weight > meta.accumulated_weight: - is_highest = False - break - if not is_highest: - return - - # Then, with the executed transactions. 
- tie_list = [] - for candidate in candidates: - tx_meta = candidate.get_metadata() - if not tx_meta.voided_by: - candidate.update_accumulated_weight(stop_value=meta.accumulated_weight) - tx_meta = candidate.get_metadata() - d = tx_meta.accumulated_weight - meta.accumulated_weight - if abs(d) < settings.WEIGHT_TOL: - tie_list.append(candidate) - elif d > 0: - is_highest = False - break - if not is_highest: - return - - # If we got here, either it was a tie or we won. - # So, let's void the conflict txs. - for conflict_tx in conflict_list: - self.mark_as_voided(conflict_tx) - - if not tie_list: - # If it is not a tie, we won. \o/ - self.mark_as_winner(tx) - - def mark_as_winner(self, tx: Transaction) -> None: - """ Mark a transaction as winner when it has a conflict and its aggregated weight - is the greatest one. - """ - assert tx.hash is not None - self.log.debug('tx.mark_as_winner', tx=tx.hash_hex) - meta = tx.get_metadata() - assert bool(meta.conflict_with) # FIXME: this looks like a runtime guarantee, MUST NOT be an assert - assert meta.voided_by == {tx.hash} - assert tx.hash not in self.context.consensus.soft_voided_tx_ids - self.remove_voided_by(tx, tx.hash) - self.assert_valid_consensus(tx) - - def remove_voided_by(self, tx: Transaction, voided_hash: bytes) -> bool: - """ Remove a hash from `meta.voided_by` and its descendants (both from verification DAG - and funds tree). 
- """ - from hathor.transaction.storage.traversal import BFSWalk - - assert tx.hash is not None - assert tx.storage is not None - - meta = tx.get_metadata() - if not meta.voided_by: - return False - if voided_hash not in meta.voided_by: - return False - - self.log.debug('remove_voided_by', tx=tx.hash_hex, voided_hash=voided_hash.hex()) - - bfs = BFSWalk(tx.storage, is_dag_funds=True, is_dag_verifications=True, is_left_to_right=True) - check_list: List[BaseTransaction] = [] - for tx2 in bfs.run(tx, skip_root=False): - assert tx2.storage is not None - - meta2 = tx2.get_metadata() - if not (meta2.voided_by and voided_hash in meta2.voided_by): - bfs.skip_neighbors(tx2) - continue - if meta2.voided_by: - meta2.voided_by.discard(voided_hash) - if meta2.voided_by == {tx2.hash}: - check_list.append(tx2) - if not meta2.voided_by: - meta2.voided_by = None - tx.storage.add_to_indexes(tx2) - self.context.save(tx2) - self.assert_valid_consensus(tx2) - - from hathor.transaction import Transaction - for tx2 in check_list: - if not tx2.is_block: - assert isinstance(tx2, Transaction) - self.check_conflicts(tx2) - return True - - def mark_as_voided(self, tx: Transaction) -> None: - """ Mark a transaction as voided when it has a conflict and its aggregated weight - is NOT the greatest one. - """ - assert tx.hash is not None - self.log.debug('tx.mark_as_voided', tx=tx.hash_hex) - meta = tx.get_metadata() - assert bool(meta.conflict_with) - if meta.voided_by and tx.hash in meta.voided_by: - return - self.add_voided_by(tx, tx.hash) - self.assert_valid_consensus(tx) - - def add_voided_by(self, tx: Transaction, voided_hash: bytes) -> bool: - """ Add a hash from `meta.voided_by` and its descendants (both from verification DAG - and funds tree). 
- """ - assert tx.hash is not None - assert tx.storage is not None - - meta = tx.get_metadata() - if meta.voided_by and voided_hash in meta.voided_by: - return False - - self.log.debug('add_voided_by', tx=tx.hash_hex, voided_hash=voided_hash.hex()) - - is_dag_verifications = True - if meta.voided_by and bool(self.context.consensus.soft_voided_tx_ids & meta.voided_by): - # If tx is soft voided, we can only walk through the DAG of funds. - is_dag_verifications = False - - from hathor.transaction.storage.traversal import BFSWalk - bfs = BFSWalk(tx.storage, is_dag_funds=True, is_dag_verifications=is_dag_verifications, is_left_to_right=True) - check_list: List[Transaction] = [] - for tx2 in bfs.run(tx, skip_root=False): - assert tx2.storage is not None - assert tx2.hash is not None - meta2 = tx2.get_metadata() - - if tx2.is_block: - assert isinstance(tx2, Block) - self.context.block_algorithm.mark_as_voided(tx2) - tx2.storage.update_best_block_tips_cache(None) - - assert not meta2.voided_by or voided_hash not in meta2.voided_by - if tx2.hash != tx.hash and meta2.conflict_with and not meta2.voided_by: - check_list.extend(cast(Transaction, tx2.storage.get_transaction(h)) for h in meta2.conflict_with) - if meta2.voided_by: - meta2.voided_by.add(voided_hash) - else: - meta2.voided_by = {voided_hash} - if meta2.conflict_with: - assert isinstance(tx2, Transaction) - self.mark_as_voided(tx2) - # All voided transactions with conflicts must have their accumulated weight calculated. 
- tx2.update_accumulated_weight(save_file=False) - self.context.save(tx2) - tx2.storage.del_from_indexes(tx2, relax_assert=True) - self.assert_valid_consensus(tx2) - - for tx2 in check_list: - self.check_conflicts(tx2) - return True - - -class TransactionConsensusAlgorithmFactory: - def __call__(self, context: ConsensusAlgorithmContext) -> TransactionConsensusAlgorithm: - return TransactionConsensusAlgorithm(context) diff --git a/hathor/consensus/__init__.py b/hathor/consensus/__init__.py new file mode 100644 index 000000000..c0fd3541e --- /dev/null +++ b/hathor/consensus/__init__.py @@ -0,0 +1,19 @@ +# Copyright 2021 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from hathor.consensus.consensus import ConsensusAlgorithm + +__all__ = [ + 'ConsensusAlgorithm', +] diff --git a/hathor/consensus/block_consensus.py b/hathor/consensus/block_consensus.py new file mode 100644 index 000000000..a16810829 --- /dev/null +++ b/hathor/consensus/block_consensus.py @@ -0,0 +1,535 @@ +# Copyright 2021 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from itertools import chain
+from typing import TYPE_CHECKING, Iterable, List, Optional, Set, cast
+
+from structlog import get_logger
+
+from hathor.conf import HathorSettings
+from hathor.profiler import get_cpu_profiler
+from hathor.transaction import BaseTransaction, Block, Transaction, sum_weights
+from hathor.util import classproperty, not_none
+
+if TYPE_CHECKING:
+    from hathor.consensus.context import ConsensusAlgorithmContext
+
+logger = get_logger()
+settings = HathorSettings()
+cpu = get_cpu_profiler()
+
+_base_transaction_log = logger.new()
+
+
+class BlockConsensusAlgorithm:
+    """Implement the consensus algorithm for blocks."""
+
+    def __init__(self, context: 'ConsensusAlgorithmContext') -> None:
+        self.context = context
+
+    @classproperty
+    def log(cls):
+        """ This is a workaround because of a bug on structlog (or abc).
+
+        See: https://github.com/hynek/structlog/issues/229
+        """
+        return _base_transaction_log
+
+    def update_consensus(self, block: Block) -> None:
+        self.update_voided_info(block)
+
+    def update_voided_info(self, block: Block) -> None:
+        """ This method is called only once when a new block arrives.
+
+        The blockchain part of the DAG is a tree with the genesis block as the root.
+        I'll say that a block A is connected to a block B when A verifies B, i.e., B is a parent of A.
+
+        A chain is a sequence of connected blocks starting in a leaf and ending in the root, i.e., any path from a leaf
+        to the root is a chain. Given a chain, its head is a leaf in the tree, and its tail is the sub-chain without
+        the head.
+
+        The best chain is a chain that has the highest score of all chains.
+
+        The score of a block is calculated as the sum of the weights of all transactions and blocks both directly and
+        indirectly verified by the block. The score of a chain is defined as the score of its head.
+ + The side chains are the chains whose scores are smaller than the best chain's. + The head of the side chains are always voided blocks. + + There are two possible states for the block chain: + (i) It has a single best chain, i.e., one chain has the highest score + (ii) It has multiple best chains, i.e., two or more chains have the same score (and this score is the highest + among the chains) + + When there are multiple best chains, I'll call them best chain candidates. + + The arrived block can be connected in four possible ways: + (i) To the head of a best chain + (ii) To the tail of the best chain + (iii) To the head of a side chain + (iv) To the tail of a side chain + + Thus, there are eight cases to be handled when a new block arrives, which are: + (i) Single best chain, connected to the head of the best chain + (ii) Single best chain, connected to the tail of the best chain + (iii) Single best chain, connected to the head of a side chain + (iv) Single best chain, connected to the tail of a side chain + (v) Multiple best chains, connected to the head of a best chain + (vi) Multiple best chains, connected to the tail of a best chain + (vii) Multiple best chains, connected to the head of a side chain + (viii) Multiple best chains, connected to the tail of a side chain + + Case (i) is trivial because the single best chain will remain as the best chain. So, just calculate the new + score and that's it. + + Case (v) is also trivial. As there are multiple best chains and the new block is connected to the head of one + of them, this will be the new winner. So, the blockchain state will change to a single best chain again. + + In the other cases, we must calculate the score and compare with the best score. + + When there are multiple best chains, all their heads will be voided. 
+ """ + assert block.weight > 0, 'This algorithm assumes that block\'s weight is always greater than zero' + if not block.parents: + assert block.is_genesis is True + self.update_score_and_mark_as_the_best_chain(block) + return + + assert block.storage is not None + assert block.hash is not None + + storage = block.storage + assert storage.indexes is not None + + # Union of voided_by of parents + voided_by: Set[bytes] = self.union_voided_by_from_parents(block) + + # Update accumulated weight of the transactions voiding us. + assert block.hash not in voided_by + for h in voided_by: + tx = storage.get_transaction(h) + tx_meta = tx.get_metadata() + tx_meta.accumulated_weight = sum_weights(tx_meta.accumulated_weight, block.weight) + self.context.save(tx) + + # Check conflicts of the transactions voiding us. + for h in voided_by: + tx = storage.get_transaction(h) + if not tx.is_block: + assert isinstance(tx, Transaction) + self.context.transaction_algorithm.check_conflicts(tx) + + parent = block.get_block_parent() + parent_meta = parent.get_metadata() + assert block.hash in parent_meta.children + + # This method is called after the metadata of the parent is updated. + # So, if the parent has only one child, it must be the current block. + is_connected_to_the_head = bool(len(parent_meta.children) == 1) + is_connected_to_the_best_chain = bool(not parent_meta.voided_by) + + if is_connected_to_the_head and is_connected_to_the_best_chain: + # Case (i): Single best chain, connected to the head of the best chain + self.update_score_and_mark_as_the_best_chain_if_possible(block) + # As `update_score_and_mark_as_the_best_chain_if_possible` may affect `voided_by`, + # we need to check that block is not voided. + meta = block.get_metadata() + if not meta.voided_by: + storage.indexes.height.add_new(meta.height, block.hash, block.timestamp) + storage.update_best_block_tips_cache([block.hash]) + # The following assert must be true, but it is commented out for performance reasons. 
+                    if settings.SLOW_ASSERTS:
+                        assert len(storage.get_best_block_tips(skip_cache=True)) == 1
+            else:
+                # Resolve all other cases, but (i).
+                log = self.log.new(block=block.hash_hex)
+                log.debug('this block is not the head of the bestchain',
+                          is_connected_to_the_head=is_connected_to_the_head,
+                          is_connected_to_the_best_chain=is_connected_to_the_best_chain)
+
+                # First, void this block.
+                self.mark_as_voided(block, skip_remove_first_block_markers=True)
+
+                # Get the score of the best chains.
+                # We need to void this block first, because otherwise it would always be one of the heads.
+                heads = [cast(Block, storage.get_transaction(h)) for h in storage.get_best_block_tips()]
+                best_score = None
+                for head in heads:
+                    head_meta = head.get_metadata(force_reload=True)
+                    if best_score is None:
+                        best_score = head_meta.score
+                    else:
+                        # All heads must have the same score.
+                        assert abs(best_score - head_meta.score) < 1e-10
+                assert isinstance(best_score, (int, float))
+
+                # Calculate the score.
+                # We cannot calculate score before getting the heads.
+                score = self.calculate_score(block)
+
+                # Finally, check who the winner is.
+                if score <= best_score - settings.WEIGHT_TOL:
+                    # Just update voided_by from parents.
+                    self.update_voided_by_from_parents(block)
+
+                else:
+                    # Either everyone has the same score or there is a winner.
+
+                    valid_heads = []
+                    for head in heads:
+                        meta = head.get_metadata()
+                        if not meta.voided_by:
+                            valid_heads.append(head)
+
+                    # We must have at most one valid head.
+                    # Either we have a single best chain or all chains have already been voided.
+                    assert len(valid_heads) <= 1, 'We must never have more than one valid head'
+
+                    # Add voided_by to all heads.
+                    common_block = self._find_first_parent_in_best_chain(block)
+                    self.add_voided_by_to_multiple_chains(block, heads, common_block)
+
+                    if score >= best_score + settings.WEIGHT_TOL:
+                        # We have a new winner candidate.
+ self.update_score_and_mark_as_the_best_chain_if_possible(block) + # As `update_score_and_mark_as_the_best_chain_if_possible` may affect `voided_by`, + # we need to check that block is not voided. + meta = block.get_metadata() + if not meta.voided_by: + self.log.debug('index new winner block', height=meta.height, block=block.hash_hex) + # We update the height cache index with the new winner chain + storage.indexes.height.update_new_chain(meta.height, block) + storage.update_best_block_tips_cache([block.hash]) + # It is only a re-org if common_block not in heads + if common_block not in heads: + self.context.mark_as_reorg(common_block) + else: + storage.update_best_block_tips_cache([not_none(blk.hash) for blk in heads]) + if not meta.voided_by: + self.context.mark_as_reorg(common_block) + + def union_voided_by_from_parents(self, block: Block) -> Set[bytes]: + """Return the union of the voided_by of block's parents. + + It does not include the hash of blocks because the hash of blocks + are not propagated through the chains. For further information, see + the docstring of the ConsensusAlgorithm class. + """ + voided_by: Set[bytes] = set() + for parent in block.get_parents(): + assert parent.hash is not None + parent_meta = parent.get_metadata() + voided_by2 = parent_meta.voided_by + if voided_by2: + if parent.is_block: + # We must go through the blocks because the voidance caused + # by a transaction must be sent ahead. For example, in the + # chain b0 <- b1 <- b2 <- b3, if a transaction voids b1, then + # it must also voids b2 and b3. But, we must ignore the hash of + # the blocks themselves. + voided_by2 = voided_by2.copy() + voided_by2.discard(parent.hash) + voided_by.update(self.context.consensus.filter_out_soft_voided_entries(parent, voided_by2)) + return voided_by + + def update_voided_by_from_parents(self, block: Block) -> bool: + """Update block's metadata voided_by from parents. 
+ Return True if the block is voided and False otherwise.""" + assert block.storage is not None + voided_by: Set[bytes] = self.union_voided_by_from_parents(block) + if voided_by: + meta = block.get_metadata() + if meta.voided_by: + meta.voided_by.update(voided_by) + else: + meta.voided_by = voided_by.copy() + self.context.save(block) + block.storage.del_from_indexes(block, relax_assert=True) + return True + return False + + def add_voided_by_to_multiple_chains(self, block: Block, heads: List[Block], first_block: Block) -> None: + # We need to go through all side chains because there may be non-voided blocks + # that must be voided. + # For instance, imagine two chains with intersection with both heads voided. + # Now, a new chain starting in genesis reaches the same score. Then, the tail + # of the two chains must be voided. + for head in heads: + while True: + if head.timestamp <= first_block.timestamp: + break + meta = head.get_metadata() + if not (meta.voided_by and head.hash in meta.voided_by): + # Only mark as voided when it is non-voided. + self.mark_as_voided(head) + # We have to go through the chain until the first parent in the best + # chain because the head may be voided with part of the tail non-voided. + head = head.get_block_parent() + + def update_score_and_mark_as_the_best_chain_if_possible(self, block: Block) -> None: + """Update block's score and mark it as best chain if it is a valid consensus. + If it is not, the block will be voided and the block with highest score will be set as + best chain. 
+ """ + assert block.storage is not None + self.update_score_and_mark_as_the_best_chain(block) + self.remove_voided_by_from_chain(block) + + if self.update_voided_by_from_parents(block): + storage = block.storage + heads = [cast(Block, storage.get_transaction(h)) for h in storage.get_best_block_tips()] + best_score = 0.0 + best_heads: List[Block] + for head in heads: + head_meta = head.get_metadata(force_reload=True) + if head_meta.score <= best_score - settings.WEIGHT_TOL: + continue + + if head_meta.score >= best_score + settings.WEIGHT_TOL: + best_heads = [head] + best_score = head_meta.score + else: + assert abs(best_score - head_meta.score) < 1e-10 + best_heads.append(head) + assert isinstance(best_score, (int, float)) and best_score > 0 + + assert len(best_heads) > 0 + first_block = self._find_first_parent_in_best_chain(best_heads[0]) + self.add_voided_by_to_multiple_chains(best_heads[0], [block], first_block) + if len(best_heads) == 1: + self.update_score_and_mark_as_the_best_chain_if_possible(best_heads[0]) + + def update_score_and_mark_as_the_best_chain(self, block: Block) -> None: + """ Update score and mark the chain as the best chain. + Thus, transactions' first_block will point to the blocks in the chain. + """ + self.calculate_score(block, mark_as_best_chain=True) + + def remove_voided_by_from_chain(self, block: Block) -> None: + """ Remove voided_by from the chain. Now, it is the best chain. + + The blocks are visited from right to left (most recent to least recent). + """ + while True: + assert block.is_block + success = self.remove_voided_by(block) + if not success: + break + block = block.get_block_parent() + + def _find_first_parent_in_best_chain(self, block: Block) -> Block: + """ Find the first block in the side chain that is not voided, i.e., the block where the fork started. + + In the simple schema below, the best chain's blocks are O's, the side chain's blocks are I's, and the first + valid block is the [O]. 
+ + O-O-O-O-[O]-O-O-O-O + | + +-I-I-I + """ + assert block.storage is not None + storage = block.storage + + assert len(block.parents) > 0, 'This should never happen because the genesis is always in the best chain' + parent_hash = block.get_block_parent_hash() + while True: + parent = storage.get_transaction(parent_hash) + assert isinstance(parent, Block) + parent_meta = parent.get_metadata() + if not parent_meta.voided_by: + break + assert len(parent.parents) > 0, 'This should never happen because the genesis is always in the best chain' + parent_hash = parent.get_block_parent_hash() + return parent + + def mark_as_voided(self, block: Block, *, skip_remove_first_block_markers: bool = False) -> None: + """ Mark a block as voided. By default, it will remove the first block markers from + `meta.first_block` of the transactions that point to it. + """ + self.log.debug('block.mark_as_voided', block=block.hash_hex) + if not skip_remove_first_block_markers: + self.remove_first_block_markers(block) + self.add_voided_by(block) + + def add_voided_by(self, block: Block, voided_hash: Optional[bytes] = None) -> bool: + """ Add a new hash in its `meta.voided_by`. If `voided_hash` is None, it includes + the block's own hash. 
+ """ + assert block.storage is not None + assert block.hash is not None + + storage = block.storage + + if voided_hash is None: + voided_hash = block.hash + assert voided_hash is not None + + meta = block.get_metadata() + if not meta.voided_by: + meta.voided_by = set() + if voided_hash in meta.voided_by: + return False + + self.log.debug('add_voided_by', block=block.hash_hex, voided_hash=voided_hash.hex()) + + meta.voided_by.add(voided_hash) + self.context.save(block) + + spent_by: Iterable[bytes] = chain(*meta.spent_outputs.values()) + for tx_hash in spent_by: + tx = storage.get_transaction(tx_hash) + assert isinstance(tx, Transaction) + self.context.transaction_algorithm.add_voided_by(tx, voided_hash) + return True + + def remove_voided_by(self, block: Block, voided_hash: Optional[bytes] = None) -> bool: + """ Remove a hash from its `meta.voided_by`. If `voided_hash` is None, it removes + the block's own hash. + """ + assert block.storage is not None + assert block.hash is not None + + storage = block.storage + + if voided_hash is None: + voided_hash = block.hash + + meta = block.get_metadata() + if not meta.voided_by: + return False + if voided_hash not in meta.voided_by: + return False + + self.log.debug('remove_voided_by', block=block.hash_hex, voided_hash=voided_hash.hex()) + + meta.voided_by.remove(voided_hash) + if not meta.voided_by: + meta.voided_by = None + self.context.save(block) + + spent_by: Iterable[bytes] = chain(*meta.spent_outputs.values()) + for tx_hash in spent_by: + tx = storage.get_transaction(tx_hash) + assert isinstance(tx, Transaction) + self.context.transaction_algorithm.remove_voided_by(tx, voided_hash) + return True + + def remove_first_block_markers(self, block: Block) -> None: + """ Remove all `meta.first_block` pointing to this block. 
+ """ + assert block.storage is not None + storage = block.storage + + from hathor.transaction.storage.traversal import BFSWalk + bfs = BFSWalk(storage, is_dag_verifications=True, is_left_to_right=False) + for tx in bfs.run(block, skip_root=True): + if tx.is_block: + bfs.skip_neighbors(tx) + continue + + meta = tx.get_metadata() + if meta.first_block != block.hash: + bfs.skip_neighbors(tx) + continue + + meta.first_block = None + self.context.save(tx) + + def _score_block_dfs(self, block: BaseTransaction, used: Set[bytes], + mark_as_best_chain: bool, newest_timestamp: int) -> float: + """ Internal method to run a DFS. It is used by `calculate_score()`. + """ + assert block.storage is not None + assert block.hash is not None + assert block.is_block + + storage = block.storage + + from hathor.transaction import Block + score = block.weight + for parent in block.get_parents(): + if parent.is_block: + assert isinstance(parent, Block) + if parent.timestamp <= newest_timestamp: + meta = parent.get_metadata() + x = meta.score + else: + x = self._score_block_dfs(parent, used, mark_as_best_chain, newest_timestamp) + score = sum_weights(score, x) + + else: + from hathor.transaction.storage.traversal import BFSWalk + bfs = BFSWalk(storage, is_dag_verifications=True, is_left_to_right=False) + for tx in bfs.run(parent, skip_root=False): + assert tx.hash is not None + assert not tx.is_block + + if tx.hash in used: + bfs.skip_neighbors(tx) + continue + used.add(tx.hash) + + meta = tx.get_metadata() + if meta.first_block: + first_block = storage.get_transaction(meta.first_block) + if first_block.timestamp <= newest_timestamp: + bfs.skip_neighbors(tx) + continue + + if mark_as_best_chain: + assert meta.first_block is None + meta.first_block = block.hash + self.context.save(tx) + + score = sum_weights(score, tx.weight) + + # Always save the score when it is calculated. 
+        meta = block.get_metadata()
+        if not meta.score:
+            meta.score = score
+            self.context.save(block)
+        else:
+            # The score of a block is immutable since the sub-DAG behind it is immutable as well.
+            # Thus, if we have already calculated it, we just check the consistency of the calculation.
+            # Unfortunately we may have to calculate it more than once when a new block arrives in a side
+            # chain because the `first_block` points only to the best chain.
+            assert abs(meta.score - score) < 1e-10, \
+                'hash={} meta.score={} score={}'.format(block.hash.hex(), meta.score, score)
+
+        return score
+
+    def calculate_score(self, block: Block, *, mark_as_best_chain: bool = False) -> float:
+        """ Calculate block's score, which is the accumulated work of the verified transactions and blocks.
+
+        :param mark_as_best_chain: If `True`, the transactions will point `meta.first_block` to
+        the blocks of the chain.
+        """
+        assert block.storage is not None
+        if block.is_genesis:
+            if mark_as_best_chain:
+                meta = block.get_metadata()
+                meta.score = block.weight
+                self.context.save(block)
+            return block.weight
+
+        parent = self._find_first_parent_in_best_chain(block)
+        newest_timestamp = parent.timestamp
+
+        used: Set[bytes] = set()
+        return self._score_block_dfs(block, used, mark_as_best_chain, newest_timestamp)
+
+
+class BlockConsensusAlgorithmFactory:
+    def __call__(self, context: 'ConsensusAlgorithmContext') -> BlockConsensusAlgorithm:
+        return BlockConsensusAlgorithm(context)
diff --git a/hathor/consensus/consensus.py b/hathor/consensus/consensus.py
new file mode 100644
index 000000000..045396cf6
--- /dev/null
+++ b/hathor/consensus/consensus.py
@@ -0,0 +1,155 @@
+# Copyright 2021 Hathor Labs
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from typing import Set

from structlog import get_logger

from hathor.conf import HathorSettings
from hathor.consensus.block_consensus import BlockConsensusAlgorithmFactory
from hathor.consensus.context import ConsensusAlgorithmContext
from hathor.consensus.transaction_consensus import TransactionConsensusAlgorithmFactory
from hathor.profiler import get_cpu_profiler
from hathor.pubsub import HathorEvents, PubSubManager
from hathor.transaction import BaseTransaction

logger = get_logger()
settings = HathorSettings()
cpu = get_cpu_profiler()

# Shared base logger; sibling modules use the same pattern as a structlog workaround.
_base_transaction_log = logger.new()


class ConsensusAlgorithm:
    """Execute the consensus algorithm marking blocks and transactions as either executed or voided.

    The consensus algorithm uses the metadata voided_by to set whether a block or transaction is executed.
    If voided_by is empty, then the block or transaction is executed. Otherwise, it is voided.

    The voided_by stores which hashes are causing the voidance. The hashes may be from both blocks and
    transactions.

    The voidance propagates through the DAG of transactions. For example, if tx1 is voided and tx2 verifies
    tx1, then tx2 must be voided as well. Another example is that, if a block is not in the bestchain,
    any transaction spending one of the block's outputs is also voided.

    In the DAG of transactions, the voided_by of tx1 is always a subset of the voided_by of all transactions
    that verify tx1 or spend one of tx1's outputs. The hash of tx1 may only be on its own voided_by when
    tx1 has conflicts and is not the winner.

    When a block is not in the bestchain, its voided_by contains its hash. This hash is also propagated
    through the transactions that spend one of its outputs.

    Differently from transactions, the hashes of the blocks are not propagated through the voided_by of
    other blocks. For example, if b0 <- b1 <- b2 <- b3 is a side chain, i.e., not the best blockchain,
    then b0's voided_by contains b0's hash, b1's voided_by contains b1's hash, and so on. The hash of
    b0 will not be propagated to the voided_by of b1, b2, and b3.
    """

    def __init__(self, soft_voided_tx_ids: Set[bytes], pubsub: PubSubManager) -> None:
        # soft_voided_tx_ids: ids of transactions to be treated as (soft) voided; frozen so
        # a running consensus update can never mutate the set.
        self.log = logger.new()
        self._pubsub = pubsub
        self.soft_voided_tx_ids = frozenset(soft_voided_tx_ids)
        self.block_algorithm_factory = BlockConsensusAlgorithmFactory()
        self.transaction_algorithm_factory = TransactionConsensusAlgorithmFactory()

    def create_context(self) -> ConsensusAlgorithmContext:
        """Handy method to create a context that can be used to access block and transaction algorithms."""
        return ConsensusAlgorithmContext(self, self._pubsub)

    @cpu.profiler(key=lambda self, base: 'consensus!{}'.format(base.hash.hex()))
    def update(self, base: BaseTransaction) -> None:
        """Run a consensus update starting from `base`.

        On any failure, mark `base` as voided by CONSENSUS_FAIL_ID and persist that before
        re-raising, so a crashed update never leaves `base` looking executed.
        """
        try:
            self._unsafe_update(base)
        except Exception:
            meta = base.get_metadata()
            meta.add_voided_by(settings.CONSENSUS_FAIL_ID)
            assert base.storage is not None
            base.storage.save_transaction(base, only_metadata=True)
            raise

    def _unsafe_update(self, base: BaseTransaction) -> None:
        """Run a consensus update with its own context, indexes will be updated accordingly."""
        from hathor.transaction import Block, Transaction

        # this context instance will live only while this update is running
        context = self.create_context()

        assert base.storage is not None
        storage = base.storage
        assert storage.indexes is not None
        # Snapshot the best chain tip before the update so we can detect height drops/reorgs below.
        best_height, best_tip = storage.indexes.height.get_height_tip()

        if isinstance(base, Transaction):
            context.transaction_algorithm.update_consensus(base)
        elif isinstance(base, Block):
            context.block_algorithm.update_consensus(base)
        else:
            raise NotImplementedError

        new_best_height, new_best_tip = storage.indexes.height.get_height_tip()
        if new_best_height < best_height:
            # Best height decreased: blocks were voided, so mempool txs may have become invalid.
            self.log.warn('height decreased, re-checking mempool', prev_height=best_height, new_height=new_best_height,
                          prev_block_tip=best_tip.hex(), new_block_tip=new_best_tip.hex())
            to_remove = storage.get_transactions_that_became_invalid()
            if to_remove:
                self.log.warn('some transactions on the mempool became invalid and will be removed',
                              count=len(to_remove))
                storage.remove_transactions(to_remove)
                for tx_removed in to_remove:
                    context.pubsub.publish(HathorEvents.CONSENSUS_TX_REMOVED, tx_hash=tx_removed.hash)

        # emit the reorg started event if needed
        if context.reorg_common_block is not None:
            old_best_block = base.storage.get_transaction(best_tip)
            new_best_block = base.storage.get_transaction(new_best_tip)
            old_best_block_meta = old_best_block.get_metadata()
            common_block_meta = context.reorg_common_block.get_metadata()
            reorg_size = old_best_block_meta.height - common_block_meta.height
            assert old_best_block != new_best_block
            assert reorg_size > 0
            context.pubsub.publish(HathorEvents.REORG_STARTED, old_best_height=best_height,
                                   old_best_block=old_best_block, new_best_height=new_best_height,
                                   new_best_block=new_best_block, common_block=context.reorg_common_block,
                                   reorg_size=reorg_size)

        # finally signal an index update for all affected transactions
        for tx_affected in context.txs_affected:
            assert tx_affected.storage is not None
            assert tx_affected.storage.indexes is not None
            tx_affected.storage.indexes.update(tx_affected)
            context.pubsub.publish(HathorEvents.CONSENSUS_TX_UPDATE, tx=tx_affected)

        # and also emit the reorg finished event if needed
        if context.reorg_common_block is not None:
            context.pubsub.publish(HathorEvents.REORG_FINISHED)

    def filter_out_soft_voided_entries(self, tx: BaseTransaction, voided_by: Set[bytes]) -> Set[bytes]:
        """Return `voided_by` with entries related to soft-voided transactions filtered out.

        Fast path: when no soft-voided id is present, the original set is returned unchanged
        (NOT a copy) — callers must not mutate the result in that case.
        """
        if not (self.soft_voided_tx_ids & voided_by):
            return voided_by
        ret = set()
        for h in voided_by:
            if h == settings.SOFT_VOIDED_ID:
                continue
            if h == tx.hash:
                continue
            if h in self.soft_voided_tx_ids:
                continue
            assert tx.storage is not None
            tx3 = tx.storage.get_transaction(h)
            tx3_meta = tx3.get_metadata()
            tx3_voided_by: Set[bytes] = tx3_meta.voided_by or set()
            # Keep h only when the voiding tx is not itself tainted by a soft-voided tx.
            if not (self.soft_voided_tx_ids & tx3_voided_by):
                ret.add(h)
        return ret
class ConsensusAlgorithmContext:
    """State shared by a single run of a consensus update.

    Bundles the per-run block/transaction algorithm instances, the set of
    transactions whose metadata was touched, and the reorg marker (if any).
    """

    consensus: 'ConsensusAlgorithm'
    pubsub: PubSubManager
    block_algorithm: 'BlockConsensusAlgorithm'
    transaction_algorithm: 'TransactionConsensusAlgorithm'
    txs_affected: Set[BaseTransaction]
    reorg_common_block: Optional[Block]

    def __init__(self, consensus: 'ConsensusAlgorithm', pubsub: PubSubManager) -> None:
        self.consensus = consensus
        self.pubsub = pubsub
        # Per-run algorithm instances, each bound back to this context.
        self.block_algorithm = consensus.block_algorithm_factory(self)
        self.transaction_algorithm = consensus.transaction_algorithm_factory(self)
        # Fresh bookkeeping for this run.
        self.txs_affected = set()
        self.reorg_common_block = None

    def save(self, tx: BaseTransaction) -> None:
        """Persist `tx` (metadata only) and record it as affected by this run."""
        assert tx.storage is not None
        self.txs_affected.add(tx)
        tx.storage.save_transaction(tx, only_metadata=True)

    def mark_as_reorg(self, common_block: Block) -> None:
        """Record the reorg's common block; asserts it is set at most once per run."""
        assert self.reorg_common_block is None
        self.reorg_common_block = common_block
# Copyright 2021 Hathor Labs
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from typing import TYPE_CHECKING, Iterable, List, Set, cast

from structlog import get_logger

from hathor.conf import HathorSettings
from hathor.profiler import get_cpu_profiler
from hathor.transaction import BaseTransaction, Block, Transaction, TxInput, sum_weights
from hathor.util import classproperty

if TYPE_CHECKING:
    from hathor.consensus.context import ConsensusAlgorithmContext

logger = get_logger()
settings = HathorSettings()
cpu = get_cpu_profiler()

_base_transaction_log = logger.new()


class TransactionConsensusAlgorithm:
    """Implement the consensus algorithm for transactions."""

    def __init__(self, context: 'ConsensusAlgorithmContext') -> None:
        # Per-run context; all metadata persistence goes through context.save().
        self.context = context

    @classproperty
    def log(cls):
        """ This is a workaround because of a bug on structlog (or abc).

        See: https://github.com/hynek/structlog/issues/229
        """
        return _base_transaction_log

    def update_consensus(self, tx: Transaction) -> None:
        """Run the transaction consensus steps, in order: mark spent outputs,
        resolve this tx's voided state, then detect conflict twins."""
        self.mark_inputs_as_used(tx)
        self.update_voided_info(tx)
        self.set_conflict_twins(tx)

    def mark_inputs_as_used(self, tx: Transaction) -> None:
        """ Mark all its inputs as used
        """
        for txin in tx.inputs:
            self.mark_input_as_used(tx, txin)

    def mark_input_as_used(self, tx: Transaction, txin: TxInput) -> None:
        """ Mark a given input as used
        """
        assert tx.hash is not None
        assert tx.storage is not None

        spent_tx = tx.storage.get_transaction(txin.tx_id)
        spent_meta = spent_tx.get_metadata()
        spent_by = spent_meta.spent_outputs[txin.index]
        assert tx.hash not in spent_by

        # Update our meta.conflict_with.
        meta = tx.get_metadata()
        if spent_by:
            # We initially void ourselves. This conflict will be resolved later.
            if not meta.voided_by:
                meta.voided_by = {tx.hash}
            else:
                meta.voided_by.add(tx.hash)
            if meta.conflict_with:
                meta.conflict_with.extend(set(spent_by) - set(meta.conflict_with))
            else:
                meta.conflict_with = spent_by.copy()
            self.context.save(tx)

        for h in spent_by:
            # Update meta.conflict_with of our conflict transactions.
            conflict_tx = tx.storage.get_transaction(h)
            tx_meta = conflict_tx.get_metadata()
            if tx_meta.conflict_with:
                if tx.hash not in tx_meta.conflict_with:
                    # We could use a set instead of a list but it consumes ~2.15 times more of memory.
                    tx_meta.conflict_with.append(tx.hash)
            else:
                tx_meta.conflict_with = [tx.hash]
            self.context.save(conflict_tx)

        # Add ourselves to meta.spent_by of our input.
        spent_by.append(tx.hash)
        self.context.save(spent_tx)

    def set_conflict_twins(self, tx: Transaction) -> None:
        """ Get all transactions that conflict with self
        and check if they are also a twin of self
        """
        assert tx.storage is not None

        meta = tx.get_metadata()
        if not meta.conflict_with:
            return

        conflict_txs = [tx.storage.get_transaction(h) for h in meta.conflict_with]
        self.check_twins(tx, conflict_txs)

    def check_twins(self, tx: Transaction, transactions: Iterable[BaseTransaction]) -> None:
        """ Check if the tx has any twins in transactions list
        A twin tx is a tx that has the same inputs and outputs
        We add all the hashes of the twin txs in the metadata

        :param transactions: list of transactions to be checked if they are twins with self
        """
        assert tx.hash is not None
        assert tx.storage is not None

        # Getting tx metadata to save the new twins
        meta = tx.get_metadata()

        # Sorting inputs and outputs for easier validation
        sorted_inputs = sorted(tx.inputs, key=lambda x: (x.tx_id, x.index, x.data))
        sorted_outputs = sorted(tx.outputs, key=lambda x: (x.script, x.value))

        for candidate in transactions:
            assert candidate.hash is not None

            # If quantity of inputs is different, it's not a twin.
            if len(candidate.inputs) != len(tx.inputs):
                continue

            # If quantity of outputs is different, it's not a twin.
            if len(candidate.outputs) != len(tx.outputs):
                continue

            # If the hash is the same, it's not a twin.
            if candidate.hash == tx.hash:
                continue

            # Verify if all the inputs are the same
            equal = True
            for index, tx_input in enumerate(sorted(candidate.inputs, key=lambda x: (x.tx_id, x.index, x.data))):
                if (tx_input.tx_id != sorted_inputs[index].tx_id or tx_input.data != sorted_inputs[index].data
                        or tx_input.index != sorted_inputs[index].index):
                    equal = False
                    break

            # Verify if all the outputs are the same
            if equal:
                for index, tx_output in enumerate(sorted(candidate.outputs, key=lambda x: (x.script, x.value))):
                    if (tx_output.value != sorted_outputs[index].value
                            or tx_output.script != sorted_outputs[index].script):
                        equal = False
                        break

            # If everything is equal we add in both metadatas
            if equal:
                meta.twins.append(candidate.hash)
                tx_meta = candidate.get_metadata()
                tx_meta.twins.append(tx.hash)
                self.context.save(candidate)

        self.context.save(tx)

    def update_voided_info(self, tx: Transaction) -> None:
        """ This method should be called only once when the transactions is added to the DAG.
        """
        assert tx.hash is not None
        assert tx.storage is not None

        voided_by: Set[bytes] = set()

        # Union of voided_by of parents
        for parent in tx.get_parents():
            parent_meta = parent.get_metadata()
            if parent_meta.voided_by:
                voided_by.update(self.context.consensus.filter_out_soft_voided_entries(parent, parent_meta.voided_by))
        assert settings.SOFT_VOIDED_ID not in voided_by
        assert not (self.context.consensus.soft_voided_tx_ids & voided_by)

        # Union of voided_by of inputs
        for txin in tx.inputs:
            spent_tx = tx.storage.get_transaction(txin.tx_id)
            spent_meta = spent_tx.get_metadata()
            if spent_meta.voided_by:
                voided_by.update(spent_meta.voided_by)
                voided_by.discard(settings.SOFT_VOIDED_ID)
        assert settings.SOFT_VOIDED_ID not in voided_by

        # Update accumulated weight of the transactions voiding us.
        assert tx.hash not in voided_by
        for h in voided_by:
            if h == settings.SOFT_VOIDED_ID:
                continue
            tx2 = tx.storage.get_transaction(h)
            tx2_meta = tx2.get_metadata()
            tx2_meta.accumulated_weight = sum_weights(tx2_meta.accumulated_weight, tx.weight)
            self.context.save(tx2)

        # Then, we add ourselves.
        meta = tx.get_metadata()
        assert not meta.voided_by or meta.voided_by == {tx.hash}
        assert meta.accumulated_weight == tx.weight
        if tx.hash in self.context.consensus.soft_voided_tx_ids:
            voided_by.add(settings.SOFT_VOIDED_ID)
            voided_by.add(tx.hash)
        if meta.conflict_with:
            voided_by.add(tx.hash)

        # We must save before marking conflicts as voided because
        # the conflicting tx might affect this tx's voided_by metadata.
        if voided_by:
            meta.voided_by = voided_by.copy()
            self.context.save(tx)
            tx.storage.del_from_indexes(tx)

        # Check conflicts of the transactions voiding us.
        for h in voided_by:
            if h == settings.SOFT_VOIDED_ID:
                continue
            if h == tx.hash:
                continue
            tx2 = tx.storage.get_transaction(h)
            if not tx2.is_block:
                assert isinstance(tx2, Transaction)
                self.check_conflicts(tx2)

        # Mark voided conflicts as voided.
        for h in meta.conflict_with or []:
            conflict_tx = cast(Transaction, tx.storage.get_transaction(h))
            conflict_tx_meta = conflict_tx.get_metadata()
            if conflict_tx_meta.voided_by:
                self.mark_as_voided(conflict_tx)

        # Finally, check our conflicts.
        meta = tx.get_metadata()
        if meta.voided_by == {tx.hash}:
            self.check_conflicts(tx)

        # Assert the final state is valid.
        self.assert_valid_consensus(tx)

    def assert_valid_consensus(self, tx: BaseTransaction) -> None:
        """Assert the conflict resolution is valid."""
        meta = tx.get_metadata()
        is_tx_executed = bool(not meta.voided_by)
        for h in meta.conflict_with or []:
            assert tx.storage is not None
            conflict_tx = cast(Transaction, tx.storage.get_transaction(h))
            conflict_tx_meta = conflict_tx.get_metadata()
            is_conflict_tx_executed = bool(not conflict_tx_meta.voided_by)
            # Two conflicting txs can never both be executed at the same time.
            assert not (is_tx_executed and is_conflict_tx_executed)

    def check_conflicts(self, tx: Transaction) -> None:
        """ Check which transaction is the winner of a conflict, the remaining are voided.

        The verification is made for each input, and `self` is only marked as winner if it
        wins in all its inputs.
        """
        assert tx.hash is not None
        assert tx.storage is not None
        self.log.debug('tx.check_conflicts', tx=tx.hash_hex)

        meta = tx.get_metadata()
        if meta.voided_by != {tx.hash}:
            return

        # Filter the possible candidates to compare to tx.
        candidates: List[Transaction] = []
        conflict_list: List[Transaction] = []
        for h in meta.conflict_with or []:
            conflict_tx = cast(Transaction, tx.storage.get_transaction(h))
            conflict_list.append(conflict_tx)
            conflict_tx_meta = conflict_tx.get_metadata()
            if not conflict_tx_meta.voided_by or conflict_tx_meta.voided_by == {conflict_tx.hash}:
                candidates.append(conflict_tx)

        # Check whether we have the highest accumulated weight.
        # First with the voided transactions.
        is_highest = True
        for candidate in candidates:
            tx_meta = candidate.get_metadata()
            if tx_meta.voided_by:
                if tx_meta.accumulated_weight > meta.accumulated_weight:
                    is_highest = False
                    break
        if not is_highest:
            return

        # Then, with the executed transactions.
        tie_list = []
        for candidate in candidates:
            tx_meta = candidate.get_metadata()
            if not tx_meta.voided_by:
                candidate.update_accumulated_weight(stop_value=meta.accumulated_weight)
                tx_meta = candidate.get_metadata()
                d = tx_meta.accumulated_weight - meta.accumulated_weight
                # Weights within WEIGHT_TOL of each other count as a tie.
                if abs(d) < settings.WEIGHT_TOL:
                    tie_list.append(candidate)
                elif d > 0:
                    is_highest = False
                    break
        if not is_highest:
            return

        # If we got here, either it was a tie or we won.
        # So, let's void the conflict txs.
        for conflict_tx in conflict_list:
            self.mark_as_voided(conflict_tx)

        if not tie_list:
            # If it is not a tie, we won. \o/
            self.mark_as_winner(tx)

    def mark_as_winner(self, tx: Transaction) -> None:
        """ Mark a transaction as winner when it has a conflict and its aggregated weight
        is the greatest one.
        """
        assert tx.hash is not None
        self.log.debug('tx.mark_as_winner', tx=tx.hash_hex)
        meta = tx.get_metadata()
        assert bool(meta.conflict_with)  # FIXME: this looks like a runtime guarantee, MUST NOT be an assert
        assert meta.voided_by == {tx.hash}
        assert tx.hash not in self.context.consensus.soft_voided_tx_ids
        self.remove_voided_by(tx, tx.hash)
        self.assert_valid_consensus(tx)

    def remove_voided_by(self, tx: Transaction, voided_hash: bytes) -> bool:
        """ Remove a hash from `meta.voided_by` and its descendants (both from verification DAG
        and funds tree).

        Returns True when anything was removed, False when `voided_hash` was not present.
        """
        from hathor.transaction.storage.traversal import BFSWalk

        assert tx.hash is not None
        assert tx.storage is not None

        meta = tx.get_metadata()
        if not meta.voided_by:
            return False
        if voided_hash not in meta.voided_by:
            return False

        self.log.debug('remove_voided_by', tx=tx.hash_hex, voided_hash=voided_hash.hex())

        bfs = BFSWalk(tx.storage, is_dag_funds=True, is_dag_verifications=True, is_left_to_right=True)
        check_list: List[BaseTransaction] = []
        for tx2 in bfs.run(tx, skip_root=False):
            assert tx2.storage is not None

            meta2 = tx2.get_metadata()
            if not (meta2.voided_by and voided_hash in meta2.voided_by):
                # Descendants of a tx that never had this hash cannot have it either.
                bfs.skip_neighbors(tx2)
                continue
            if meta2.voided_by:
                meta2.voided_by.discard(voided_hash)
            if meta2.voided_by == {tx2.hash}:
                # Only self-voidance remains: re-run conflict resolution for it later.
                check_list.append(tx2)
            if not meta2.voided_by:
                meta2.voided_by = None
                tx.storage.add_to_indexes(tx2)
            self.context.save(tx2)
            self.assert_valid_consensus(tx2)

        from hathor.transaction import Transaction
        for tx2 in check_list:
            if not tx2.is_block:
                assert isinstance(tx2, Transaction)
                self.check_conflicts(tx2)
        return True

    def mark_as_voided(self, tx: Transaction) -> None:
        """ Mark a transaction as voided when it has a conflict and its aggregated weight
        is NOT the greatest one.
        """
        assert tx.hash is not None
        self.log.debug('tx.mark_as_voided', tx=tx.hash_hex)
        meta = tx.get_metadata()
        assert bool(meta.conflict_with)
        if meta.voided_by and tx.hash in meta.voided_by:
            return
        self.add_voided_by(tx, tx.hash)
        self.assert_valid_consensus(tx)

    def add_voided_by(self, tx: Transaction, voided_hash: bytes) -> bool:
        """ Add a hash from `meta.voided_by` and its descendants (both from verification DAG
        and funds tree).

        Returns True when the hash was propagated, False when it was already present.
        """
        assert tx.hash is not None
        assert tx.storage is not None

        meta = tx.get_metadata()
        if meta.voided_by and voided_hash in meta.voided_by:
            return False

        self.log.debug('add_voided_by', tx=tx.hash_hex, voided_hash=voided_hash.hex())

        is_dag_verifications = True
        if meta.voided_by and bool(self.context.consensus.soft_voided_tx_ids & meta.voided_by):
            # If tx is soft voided, we can only walk through the DAG of funds.
            is_dag_verifications = False

        from hathor.transaction.storage.traversal import BFSWalk
        bfs = BFSWalk(tx.storage, is_dag_funds=True, is_dag_verifications=is_dag_verifications, is_left_to_right=True)
        check_list: List[Transaction] = []
        for tx2 in bfs.run(tx, skip_root=False):
            assert tx2.storage is not None
            assert tx2.hash is not None
            meta2 = tx2.get_metadata()

            if tx2.is_block:
                assert isinstance(tx2, Block)
                self.context.block_algorithm.mark_as_voided(tx2)
                tx2.storage.update_best_block_tips_cache(None)

            assert not meta2.voided_by or voided_hash not in meta2.voided_by
            if tx2.hash != tx.hash and meta2.conflict_with and not meta2.voided_by:
                # An executed tx in our subtree is being voided; its conflicts may now win.
                check_list.extend(cast(Transaction, tx2.storage.get_transaction(h)) for h in meta2.conflict_with)
            if meta2.voided_by:
                meta2.voided_by.add(voided_hash)
            else:
                meta2.voided_by = {voided_hash}
            if meta2.conflict_with:
                assert isinstance(tx2, Transaction)
                self.mark_as_voided(tx2)
                # All voided transactions with conflicts must have their accumulated weight calculated.
                tx2.update_accumulated_weight(save_file=False)
            self.context.save(tx2)
            tx2.storage.del_from_indexes(tx2, relax_assert=True)
            self.assert_valid_consensus(tx2)

        for tx2 in check_list:
            self.check_conflicts(tx2)
        return True


class TransactionConsensusAlgorithmFactory:
    # Callable factory so ConsensusAlgorithmContext can build a per-run instance.
    def __call__(self, context: 'ConsensusAlgorithmContext') -> TransactionConsensusAlgorithm:
        return TransactionConsensusAlgorithm(context)
- -from dataclasses import dataclass -from typing import Dict, Optional - - -@dataclass -class BaseEvent: - # Full node id, because different full nodes can have different sequences of events - peer_id: str - # Event unique id, determines event order - id: int - # Timestamp in which the event was emitted, this follows the unix_timestamp format, it's only informative, events - # aren't guaranteed to always have sequential timestamps, for example, if the system clock changes between two - # events it's possible that timestamps will temporarily decrease. - timestamp: float - # One of the event types - type: str - # Variable for event type - data: Dict - # Used to link events, for example, many TX_METADATA_CHANGED will have the same group_id when they belong to the - # same reorg process - group_id: Optional[int] = None diff --git a/hathor/event/event_manager.py b/hathor/event/event_manager.py index a008a40f4..4ab06cbe4 100644 --- a/hathor/event/event_manager.py +++ b/hathor/event/event_manager.py @@ -12,87 +12,189 @@ # See the License for the specific language governing permissions and # limitations under the License. 
from typing import Callable, Optional

from structlog import get_logger

from hathor.event.model.base_event import BaseEvent
from hathor.event.model.event_type import EventType
from hathor.event.storage import EventStorage
from hathor.event.websocket import EventWebsocketFactory
from hathor.pubsub import EventArguments, HathorEvents, PubSubManager
from hathor.util import Reactor

logger = get_logger()

# Event types that open an event group (all events until the matching end share a group_id).
_GROUP_START_EVENTS = {
    EventType.REORG_STARTED,
}

# Event types that close the currently open event group.
_GROUP_END_EVENTS = {
    EventType.REORG_FINISHED,
}

# PubSub topics this manager listens to.
_SUBSCRIBE_EVENTS = [
    HathorEvents.MANAGER_ON_START,
    HathorEvents.LOAD_FINISHED,
    HathorEvents.NETWORK_NEW_TX_ACCEPTED,
    HathorEvents.REORG_STARTED,
    HathorEvents.REORG_FINISHED,
    HathorEvents.CONSENSUS_TX_UPDATE,
]


class EventManager:
    """Class that manages integration events.

    Events are received from PubSub, persisted on the storage and sent to WebSocket clients.
    """

    # peer id of this full node, set on start() — events from different peers have independent sequences
    _peer_id: str
    # True between start() and stop(); events must not be handled outside this window
    _is_running: bool = False
    # becomes True once LOAD_FINISHED is seen; before that, events are dropped unless emit_load_events
    _load_finished: bool = False

    @property
    def event_storage(self) -> EventStorage:
        return self._event_storage

    def __init__(
        self,
        event_storage: EventStorage,
        event_ws_factory: EventWebsocketFactory,
        pubsub: PubSubManager,
        reactor: Reactor,
        emit_load_events: bool = False
    ):
        self.log = logger.new()

        self._clock = reactor
        self._event_storage = event_storage
        self._event_ws_factory = event_ws_factory
        self._pubsub = pubsub
        self.emit_load_events = emit_load_events

        # Resume id/group-id sequences from what is already persisted.
        self._last_event = self._event_storage.get_last_event()
        self._last_existing_group_id = self._event_storage.get_last_group_id()

        self._assert_closed_event_group()
        self._subscribe_events()

    def start(self, peer_id: str) -> None:
        """Start handling events; must be called exactly once before any event arrives."""
        assert self._is_running is False, 'Cannot start, EventManager is already running'

        self._peer_id = peer_id
        self._event_ws_factory.start()
        self._is_running = True

    def stop(self):
        """Stop handling events and shut down the websocket factory."""
        assert self._is_running is True, 'Cannot stop, EventManager is not running'

        self._event_ws_factory.stop()
        self._is_running = False

    def _assert_closed_event_group(self):
        # XXX: we must check that the last event either does not belong to an event group or that it just closed an
        # event group, because we cannot resume an open group of events that wasn't properly closed before exit
        assert (
            self._event_group_is_closed()
        ), 'an unclosed event group was detected, which indicates the node crashed, cannot resume'

    def _event_group_is_closed(self):
        # Closed when there is no event yet, the last event is groupless, or the last event ended a group.
        return (
            self._last_event is None or
            self._last_event.group_id is None or
            EventType(self._last_event.type) in _GROUP_END_EVENTS
        )

    def _subscribe_events(self):
        """ Subscribe to defined events for the pubsub received
        """
        for event in _SUBSCRIBE_EVENTS:
            self._pubsub.subscribe(event, self._handle_event)

    def _handle_event(self, hathor_event: HathorEvents, event_args: EventArguments) -> None:
        """PubSub callback: route the event through type-specific handling and persistence."""
        assert self._is_running, 'Cannot handle event, EventManager is not started.'

        event_type = EventType.from_hathor_event(hathor_event)
        event_specific_handlers = {
            EventType.LOAD_FINISHED: self._handle_load_finished
        }

        if event_specific_handler := event_specific_handlers.get(event_type):
            event_specific_handler()

        # Skip persisting events that happen while loading, unless explicitly enabled.
        if not self._load_finished and not self.emit_load_events:
            return

        self._handle_event_creation(event_type, event_args)

    def _handle_event_creation(self, event_type: EventType, event_args: EventArguments) -> None:
        """Build the event (group start/end/other), persist it, and broadcast it."""
        create_event_fn: Callable[[EventType, EventArguments], BaseEvent]

        if event_type in _GROUP_START_EVENTS:
            create_event_fn = self._create_group_start_event
        elif event_type in _GROUP_END_EVENTS:
            create_event_fn = self._create_group_end_event
        else:
            create_event_fn = self._create_non_group_edge_event

        event = create_event_fn(event_type, event_args)

        self._event_storage.save_event(event)
        self._event_ws_factory.broadcast_event(event)

        self._last_event = event

    def _create_group_start_event(self, event_type: EventType, event_args: EventArguments) -> BaseEvent:
        """Create an event that opens a new event group with a fresh group_id."""
        assert self._event_group_is_closed(), 'A new event group cannot be started as one is already in progress.'

        new_group_id = 0 if self._last_existing_group_id is None else self._last_existing_group_id + 1

        self._last_existing_group_id = new_group_id

        return self._create_event(
            event_type=event_type,
            event_args=event_args,
            group_id=new_group_id,
        )

    def _create_group_end_event(self, event_type: EventType, event_args: EventArguments) -> BaseEvent:
        """Create an event that closes the currently open event group."""
        assert self._last_event is not None, 'Cannot end event group if there are no events.'
        assert not self._event_group_is_closed(), 'Cannot end event group as none is in progress.'

        return self._create_event(
            event_type=event_type,
            event_args=event_args,
            group_id=self._last_event.group_id,
        )

    def _create_non_group_edge_event(self, event_type: EventType, event_args: EventArguments) -> BaseEvent:
        """Create a regular event; it inherits the open group_id if a group is in progress."""
        group_id = None

        if not self._event_group_is_closed():
            assert self._last_event is not None, 'Cannot continue event group if there are no events.'
            group_id = self._last_event.group_id

        return self._create_event(
            event_type=event_type,
            event_args=event_args,
            group_id=group_id,
        )

    def _handle_load_finished(self):
        self._load_finished = True

    def _create_event(
        self,
        event_type: EventType,
        event_args: EventArguments,
        group_id: Optional[int],
    ) -> BaseEvent:
        # Event ids are a strictly sequential counter continued from the last persisted event.
        return BaseEvent.from_event_arguments(
            event_id=0 if self._last_event is None else self._last_event.id + 1,
            peer_id=self._peer_id,
            timestamp=self._clock.seconds(),
            event_type=event_type,
            event_args=event_args,
            group_id=group_id,
        )
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from typing import Optional

from pydantic import NonNegativeInt, validator

from hathor.event.model.event_data import EventData
from hathor.event.model.event_type import EventType
from hathor.pubsub import EventArguments
from hathor.utils.pydantic import BaseModel


class BaseEvent(BaseModel, use_enum_values=True):
    """Serializable integration event, persisted and broadcast to websocket clients."""

    # Full node id, because different full nodes can have different sequences of events
    peer_id: str
    # Event unique id, determines event order
    id: NonNegativeInt
    # Timestamp in which the event was emitted, this follows the unix_timestamp format, it's only informative, events
    # aren't guaranteed to always have sequential timestamps, for example, if the system clock changes between two
    # events it's possible that timestamps will temporarily decrease.
    timestamp: float
    # One of the event types
    type: EventType
    # Variable for event type
    data: EventData
    # Used to link events, for example, many TX_METADATA_CHANGED will have the same group_id when they belong to the
    # same reorg process
    group_id: Optional[NonNegativeInt] = None

    @classmethod
    def from_event_arguments(
        cls,
        peer_id: str,
        event_id: NonNegativeInt,
        timestamp: float,
        event_type: EventType,
        event_args: EventArguments,
        group_id: Optional[NonNegativeInt]
    ) -> 'BaseEvent':
        """Build a BaseEvent from pubsub arguments, converting them into the
        payload model declared for `event_type`."""
        event_data_type = event_type.data_type()

        return cls(
            peer_id=peer_id,
            id=event_id,
            timestamp=timestamp,
            type=event_type,
            data=event_data_type.from_event_arguments(event_args),
            group_id=group_id,
        )

    @validator('data')
    def data_type_must_match_event_type(cls, v, values):
        """Reject a payload whose concrete model does not match the declared event type."""
        # Pydantic omits fields that failed their own validation from `values`; if `type`
        # is absent, skip the cross-field check so the original error is reported instead
        # of a spurious KeyError from `values['type']`.
        if 'type' not in values:
            return v

        event_type = EventType(values['type'])
        expected_data_type = event_type.data_type()

        if type(v) != expected_data_type:
            raise ValueError('event data type does not match event type')

        return v
+ +from typing import List, Optional, Union, cast + +from pydantic import Extra, validator + +from hathor.pubsub import EventArguments +from hathor.utils.pydantic import BaseModel + + +class TxInput(BaseModel): + tx_id: str + index: int + data: str + + +class TxOutput(BaseModel): + value: int + script: str + token_data: int + + +class SpentOutput(BaseModel): + index: int + tx_ids: List[str] + + +class TxMetadata(BaseModel, extra=Extra.ignore): + hash: str + spent_outputs: List[SpentOutput] + conflict_with: List[str] + voided_by: List[str] + received_by: List[int] + children: List[str] + twins: List[str] + accumulated_weight: float + score: float + first_block: Optional[str] + height: int + validation: str + + @validator('spent_outputs', pre=True, each_item=True) + def _parse_spent_outputs(cls, spent_output: Union[SpentOutput, List[Union[int, List[str]]]]) -> SpentOutput: + """ + This validator method is called by pydantic when parsing models, and is not supposed to be called directly. + It either returns a SpentOutput if it receives one, or tries to parse it as a list (as returned from + metadata.to_json() method). 
Examples: + + >>> TxMetadata._parse_spent_outputs(SpentOutput(index=0, tx_ids=['tx1', 'tx2'])) + SpentOutput(index=0, tx_ids=['tx1', 'tx2']) + >>> TxMetadata._parse_spent_outputs([0, ['tx1', 'tx2']]) + SpentOutput(index=0, tx_ids=['tx1', 'tx2']) + """ + if isinstance(spent_output, SpentOutput): + return spent_output + + index, tx_ids = spent_output + + return SpentOutput( + index=cast(int, index), + tx_ids=cast(List[str], tx_ids) + ) + + +class BaseEventData(BaseModel): + @classmethod + def from_event_arguments(cls, args: EventArguments) -> 'EventData': + raise NotImplementedError() + + +class EmptyData(BaseEventData): + @classmethod + def from_event_arguments(cls, args: EventArguments) -> 'EmptyData': + return cls() + + +class TxData(BaseEventData, extra=Extra.ignore): + hash: str + nonce: int + timestamp: int + version: int + weight: float + inputs: List['TxInput'] + outputs: List['TxOutput'] + parents: List[str] + tokens: List[str] + # TODO: Token name and symbol could be in a different class because they're only used by TokenCreationTransaction + token_name: Optional[str] + token_symbol: Optional[str] + metadata: 'TxMetadata' + + @classmethod + def from_event_arguments(cls, args: EventArguments) -> 'TxData': + tx_json = args.tx.to_json(include_metadata=True) + + return cls(**tx_json) + + +class ReorgData(BaseEventData): + reorg_size: int + previous_best_block: str + new_best_block: str + common_block: str + + @classmethod + def from_event_arguments(cls, args: EventArguments) -> 'ReorgData': + return cls( + reorg_size=args.reorg_size, + previous_best_block=args.old_best_block.hash_hex, + new_best_block=args.new_best_block.hash_hex, + common_block=args.common_block.hash_hex, + ) + + +EventData = Union[EmptyData, TxData, ReorgData] diff --git a/hathor/event/model/event_type.py b/hathor/event/model/event_type.py new file mode 100644 index 000000000..ff3df2f62 --- /dev/null +++ b/hathor/event/model/event_type.py @@ -0,0 +1,58 @@ +# Copyright 2023 Hathor Labs +# +# 
Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from enum import Enum +from typing import Dict, Type + +from hathor.event.model.event_data import BaseEventData, EmptyData, ReorgData, TxData +from hathor.pubsub import HathorEvents + + +class EventType(Enum): + LOAD_STARTED = 'LOAD_STARTED' + LOAD_FINISHED = 'LOAD_FINISHED' + NEW_VERTEX_ACCEPTED = 'NEW_VERTEX_ACCEPTED' + REORG_STARTED = 'REORG_STARTED' + REORG_FINISHED = 'REORG_FINISHED' + VERTEX_METADATA_CHANGED = 'VERTEX_METADATA_CHANGED' + + @classmethod + def from_hathor_event(cls, hathor_event: HathorEvents) -> 'EventType': + event = _HATHOR_EVENT_TO_EVENT_TYPE.get(hathor_event) + + assert event is not None, f'Cannot create EventType from {hathor_event}' + + return event + + def data_type(self) -> Type[BaseEventData]: + return _EVENT_TYPE_TO_EVENT_DATA[self] + + +_HATHOR_EVENT_TO_EVENT_TYPE = { + HathorEvents.MANAGER_ON_START: EventType.LOAD_STARTED, + HathorEvents.LOAD_FINISHED: EventType.LOAD_FINISHED, + HathorEvents.NETWORK_NEW_TX_ACCEPTED: EventType.NEW_VERTEX_ACCEPTED, + HathorEvents.REORG_STARTED: EventType.REORG_STARTED, + HathorEvents.REORG_FINISHED: EventType.REORG_FINISHED, + HathorEvents.CONSENSUS_TX_UPDATE: EventType.VERTEX_METADATA_CHANGED +} + +_EVENT_TYPE_TO_EVENT_DATA: Dict[EventType, Type[BaseEventData]] = { + EventType.LOAD_STARTED: EmptyData, + EventType.LOAD_FINISHED: EmptyData, + EventType.NEW_VERTEX_ACCEPTED: TxData, + EventType.REORG_STARTED: ReorgData, + EventType.REORG_FINISHED: EmptyData, + 
EventType.VERTEX_METADATA_CHANGED: TxData, +} diff --git a/hathor/event/resources/__init__.py b/hathor/event/resources/__init__.py new file mode 100644 index 000000000..550e4ae20 --- /dev/null +++ b/hathor/event/resources/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/hathor/event/resources/event.py b/hathor/event/resources/event.py new file mode 100644 index 000000000..cde4e6740 --- /dev/null +++ b/hathor/event/resources/event.py @@ -0,0 +1,146 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from itertools import islice +from typing import List, Optional + +from pydantic import Field, NonNegativeInt + +from hathor.api_util import Resource, set_cors +from hathor.cli.openapi_files.register import register_resource +from hathor.conf import HathorSettings +from hathor.event import EventManager +from hathor.event.model.base_event import BaseEvent +from hathor.utils.api import ErrorResponse, QueryParams, Response + +settings = HathorSettings() + + +@register_resource +class EventResource(Resource): + isLeaf = True + + def __init__(self, event_manager: Optional[EventManager]): + super().__init__() + self.event_manager = event_manager + + def render_GET(self, request): + request.setHeader(b'content-type', b'application/json; charset=utf-8') + set_cors(request, 'GET') + + if not self.event_manager: + request.setResponseCode(503) + + return ErrorResponse(error='EventManager unavailable.').json_dumpb() + + params = GetEventsParams.from_request(request) + + if isinstance(params, ErrorResponse): + return params.json_dumpb() + + next_event_id = 0 if params.last_ack_event_id is None else params.last_ack_event_id + 1 + event_iter = self.event_manager.event_storage.iter_from_event(next_event_id) + last_event = self.event_manager.event_storage.get_last_event() + last_event_id = last_event.id if last_event is not None else None + + response = GetEventsResponse( + latest_event_id=last_event_id, + events=list( + islice(event_iter, params.size) + ) + ) + + return response.json_dumpb() + + +class GetEventsParams(QueryParams): + last_ack_event_id: Optional[NonNegativeInt] + size: int = Field(default=settings.EVENT_API_DEFAULT_BATCH_SIZE, ge=0, le=settings.EVENT_API_MAX_BATCH_SIZE) + + +class GetEventsResponse(Response): + events: List[BaseEvent] + latest_event_id: Optional[int] + + +EventResource.openapi = { + '/event': { + 'x-visibility': 'private', + 'get': { + 'operationId': 'event', + 'summary': 'Hathor Events', + 'description': 'Returns information about past events', 
+ 'parameters': [ + { + 'name': 'last_ack_event_id', + 'in': 'query', + 'description': 'ID of last acknowledged event', + 'required': False, + 'schema': { + 'type': 'int' + } + }, + { + 'name': 'size', + 'in': 'query', + 'description': 'Amount of events', + 'required': False, + 'schema': { + 'type': 'int' + } + } + ], + 'responses': { + '200': { + 'description': 'Success', + 'content': { + 'application/json': { + 'examples': { + 'success': { + "events": [ + { + "peer_id": ("315d290c818091e5f01b8e52c45e7e24" + "f2558ba4376f423358fdc4c71d70da9a"), + "id": 0, + "timestamp": 1676332496.991634, + "type": "consensus:tx_update", + "data": { + "hash": ("00000000030b86022eaea447484bd4d7" + "70be0fbd7e03678967f601c315673c5c") + }, + "group_id": None + }, + { + "peer_id": ("315d290c818091e5f01b8e52c45e7e24" + "f2558ba4376f423358fdc4c71d70da9a"), + "id": 1, + "timestamp": 1676332497.1872509, + "type": "network:new_tx_accepted", + "data": { + "hash": ("00000000030b86022eaea447484bd4d7" + "70be0fbd7e03678967f601c315673c5c") + }, + "group_id": None + } + ], + "latest_event_id": 342 + } + } + } + } + } + } + } + } +} diff --git a/hathor/event/storage/event_storage.py b/hathor/event/storage/event_storage.py index a16ffc59f..c91b14d79 100644 --- a/hathor/event/storage/event_storage.py +++ b/hathor/event/storage/event_storage.py @@ -13,9 +13,9 @@ # limitations under the License. 
from abc import ABC, abstractmethod -from typing import Optional +from typing import Iterator, Optional -from hathor.event.base_event import BaseEvent +from hathor.event.model.base_event import BaseEvent class EventStorage(ABC): @@ -38,3 +38,8 @@ def get_last_event(self) -> Optional[BaseEvent]: def get_last_group_id(self) -> Optional[int]: """ Get the last group-id that was emitted, this is used to help resume when restarting.""" raise NotImplementedError + + @abstractmethod + def iter_from_event(self, key: int) -> Iterator[BaseEvent]: + """ Iterate through events starting from the event with the given key""" + raise NotImplementedError diff --git a/hathor/event/storage/memory_storage.py b/hathor/event/storage/memory_storage.py index c61bb0471..3d28a2035 100644 --- a/hathor/event/storage/memory_storage.py +++ b/hathor/event/storage/memory_storage.py @@ -12,21 +12,19 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import List, Optional +from typing import Iterator, List, Optional -from hathor.event.base_event import BaseEvent +from hathor.event.model.base_event import BaseEvent from hathor.event.storage.event_storage import EventStorage class EventMemoryStorage(EventStorage): - def __init__(self): + def __init__(self) -> None: self._events: List[BaseEvent] = [] self._last_event: Optional[BaseEvent] = None self._last_group_id: Optional[int] = None def save_event(self, event: BaseEvent) -> None: - if event.id < 0: - raise ValueError('event.id must be non-negative') if event.id != len(self._events): raise ValueError('invalid event.id, ids must be sequential and leave no gaps') self._last_event = event @@ -36,7 +34,7 @@ def save_event(self, event: BaseEvent) -> None: def get_event(self, key: int) -> Optional[BaseEvent]: if key < 0: - raise ValueError('key must be non-negative') + raise ValueError(f'event.id \'{key}\' must be non-negative') if key >= len(self._events): return None event = 
self._events[key] @@ -48,3 +46,11 @@ def get_last_event(self) -> Optional[BaseEvent]: def get_last_group_id(self) -> Optional[int]: return self._last_group_id + + def iter_from_event(self, key: int) -> Iterator[BaseEvent]: + if key < 0: + raise ValueError(f'event.id \'{key}\' must be non-negative') + + while key < len(self._events): + yield self._events[key] + key += 1 diff --git a/hathor/event/storage/rocksdb_storage.py b/hathor/event/storage/rocksdb_storage.py index 83c8b48b3..5f5f02424 100644 --- a/hathor/event/storage/rocksdb_storage.py +++ b/hathor/event/storage/rocksdb_storage.py @@ -12,13 +12,13 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Optional +from typing import Iterator, Optional -from hathor.event.base_event import BaseEvent +from hathor.event.model.base_event import BaseEvent from hathor.event.storage.event_storage import EventStorage from hathor.storage.rocksdb_storage import RocksDBStorage from hathor.transaction.util import int_to_bytes -from hathor.util import json_dumpb, json_loadb +from hathor.util import json_dumpb _CF_NAME_EVENT = b'event' _CF_NAME_META = b'event-metadata' @@ -33,16 +33,15 @@ def __init__(self, rocksdb_storage: RocksDBStorage): self._last_event: Optional[BaseEvent] = self._db_get_last_event() self._last_group_id: Optional[int] = self._db_get_last_group_id() - def _load_from_bytes(self, event_data: bytes) -> BaseEvent: - event_dict = json_loadb(event_data) - return BaseEvent( - id=event_dict['id'], - peer_id=event_dict['peer_id'], - timestamp=event_dict['timestamp'], - type=event_dict['type'], - group_id=event_dict['group_id'], - data=event_dict['data'], - ) + def iter_from_event(self, key: int) -> Iterator[BaseEvent]: + if key < 0: + raise ValueError(f'event.id \'{key}\' must be non-negative') + + it = self._db.itervalues(self._cf_event) + it.seek(int_to_bytes(key, 8)) + + for event_bytes in it: + yield BaseEvent.parse_raw(event_bytes) def 
_db_get_last_event(self) -> Optional[BaseEvent]: last_element: Optional[bytes] = None @@ -52,7 +51,7 @@ def _db_get_last_event(self) -> Optional[BaseEvent]: for i in it: last_element = i break - return None if last_element is None else self._load_from_bytes(last_element) + return None if last_element is None else BaseEvent.parse_raw(last_element) def _db_get_last_group_id(self) -> Optional[int]: last_group_id = self._db.get((self._cf_meta, _KEY_LAST_GROUP_ID)) @@ -61,12 +60,10 @@ def _db_get_last_group_id(self) -> Optional[int]: return int.from_bytes(last_group_id, byteorder='big', signed=False) def save_event(self, event: BaseEvent) -> None: - if event.id < 0: - raise ValueError('event.id must be non-negative') if (self._last_event is None and event.id != 0) or \ (self._last_event is not None and event.id > self._last_event.id + 1): raise ValueError('invalid event.id, ids must be sequential and leave no gaps') - event_data = json_dumpb(event.__dict__) + event_data = json_dumpb(event.dict()) key = int_to_bytes(event.id, 8) self._db.put((self._cf_event, key), event_data) self._last_event = event @@ -76,11 +73,11 @@ def save_event(self, event: BaseEvent) -> None: def get_event(self, key: int) -> Optional[BaseEvent]: if key < 0: - raise ValueError('key must be non-negative') + raise ValueError(f'event.id \'{key}\' must be non-negative') event = self._db.get((self._cf_event, int_to_bytes(key, 8))) if event is None: return None - return self._load_from_bytes(event_data=event) + return BaseEvent.parse_raw(event) def get_last_event(self) -> Optional[BaseEvent]: return self._last_event diff --git a/hathor/event/websocket/__init__.py b/hathor/event/websocket/__init__.py new file mode 100644 index 000000000..b831cff42 --- /dev/null +++ b/hathor/event/websocket/__init__.py @@ -0,0 +1,18 @@ +# Copyright 2022 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from hathor.event.websocket.factory import EventWebsocketFactory +from hathor.event.websocket.protocol import EventWebsocketProtocol + +__all__ = ['EventWebsocketFactory', 'EventWebsocketProtocol'] diff --git a/hathor/event/websocket/factory.py b/hathor/event/websocket/factory.py new file mode 100644 index 000000000..41b19dd14 --- /dev/null +++ b/hathor/event/websocket/factory.py @@ -0,0 +1,105 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from typing import Optional, Set + +from autobahn.twisted.websocket import WebSocketServerFactory +from structlog import get_logger + +from hathor.event.model.base_event import BaseEvent +from hathor.event.storage import EventStorage +from hathor.event.websocket.protocol import EventWebsocketProtocol +from hathor.event.websocket.response import EventResponse, InvalidRequestType +from hathor.util import Reactor + +logger = get_logger() + + +class EventWebsocketFactory(WebSocketServerFactory): + """ Websocket that will handle events + """ + + protocol = EventWebsocketProtocol + _is_running = False + _latest_event_id: Optional[int] = None + + def __init__(self, reactor: Reactor, event_storage: EventStorage): + super().__init__() + self.log = logger.new() + self._reactor = reactor + self._event_storage = event_storage + self._connections: Set[EventWebsocketProtocol] = set() + + latest_event = self._event_storage.get_last_event() + + if latest_event is not None: + self._latest_event_id = latest_event.id + + def start(self): + """Start the WebSocket server. Required to be able to send events.""" + assert self._is_running is False, 'Cannot start, EventWebsocketFactory is already running' + + self._is_running = True + + def stop(self): + """Stop the WebSocket server. No events can be sent.""" + assert self._is_running is True, 'Cannot stop, EventWebsocketFactory is not running' + + self._is_running = False + + for connection in self._connections: + connection.sendClose() + + self._connections.clear() + + def broadcast_event(self, event: BaseEvent) -> None: + """Broadcast the event to each registered client.""" + self._latest_event_id = event.id + + for connection in self._connections: + self._send_event_to_connection(connection, event) + + def register(self, connection: EventWebsocketProtocol) -> None: + """Registers a client. 
Called when a ws connection is opened (after handshaking).""" + if not self._is_running: + return connection.send_invalid_request_response(InvalidRequestType.EVENT_WS_NOT_RUNNING) + + self.log.info('registering connection', client_peer=connection.client_peer) + + self._connections.add(connection) + + def unregister(self, connection: EventWebsocketProtocol) -> None: + """Unregisters a client. Called when a ws connection is closed.""" + self.log.info('unregistering connection', client_peer=connection.client_peer) + self._connections.discard(connection) + + def send_next_event_to_connection(self, connection: EventWebsocketProtocol) -> None: + next_event_id = connection.next_expected_event_id() + + if not connection.can_receive_event(next_event_id): + return + + if event := self._event_storage.get_event(next_event_id): + self._send_event_to_connection(connection, event) + self._reactor.callLater(0, self.send_next_event_to_connection, connection) + + def _send_event_to_connection(self, connection: EventWebsocketProtocol, event: BaseEvent) -> None: + if not connection.can_receive_event(event.id): + return + + assert self._latest_event_id is not None, '_latest_event_id must be set.' + + response = EventResponse(event=event, latest_event_id=self._latest_event_id) + + connection.send_event_response(response) diff --git a/hathor/event/websocket/protocol.py b/hathor/event/websocket/protocol.py new file mode 100644 index 000000000..26c70891b --- /dev/null +++ b/hathor/event/websocket/protocol.py @@ -0,0 +1,186 @@ +# Copyright 2022 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import TYPE_CHECKING, Callable, Dict, Optional, Type + +from autobahn.exception import Disconnected +from autobahn.twisted.websocket import WebSocketServerProtocol +from autobahn.websocket import ConnectionRequest +from pydantic import ValidationError +from structlog import get_logger + +from hathor.event.websocket.request import AckRequest, Request, RequestWrapper, StartStreamRequest, StopStreamRequest +from hathor.event.websocket.response import EventResponse, InvalidRequestResponse, InvalidRequestType, Response +from hathor.util import json_dumpb + +if TYPE_CHECKING: + from hathor.event.websocket import EventWebsocketFactory + +logger = get_logger() + + +class EventWebsocketProtocol(WebSocketServerProtocol): + """ Websocket protocol, basically forwards some events to the Websocket factory. 
+ """ + + factory: 'EventWebsocketFactory' + client_peer: Optional[str] = None + + _last_sent_event_id: Optional[int] = None + _ack_event_id: Optional[int] = None + _window_size: int = 0 + _stream_is_active: bool = False + + def __init__(self): + super().__init__() + self.log = logger.new() + + def can_receive_event(self, event_id: int) -> bool: + """Returns whether this client is available to receive an event.""" + number_of_pending_events = 0 + + if self._last_sent_event_id is not None: + ack_offset = -1 if self._ack_event_id is None else self._ack_event_id + number_of_pending_events = self._last_sent_event_id - ack_offset + + return ( + self._stream_is_active + and event_id == self.next_expected_event_id() + and number_of_pending_events < self._window_size + ) + + def next_expected_event_id(self) -> int: + """Returns the ID of the next event the client expects.""" + return 0 if self._last_sent_event_id is None else self._last_sent_event_id + 1 + + def onConnect(self, request: ConnectionRequest) -> None: + self.client_peer = request.peer + self.log = self.log.new(client_peer=self.client_peer) + self.log.info('connection opened to the event websocket, starting handshake...') + + def onOpen(self) -> None: + self.log.info('connection established to the event websocket') + self.factory.register(self) + + def onClose(self, wasClean: bool, code: int, reason: str) -> None: + self.log.info('connection closed to the event websocket', reason=reason) + self.factory.unregister(self) + + def onMessage(self, payload: bytes, isBinary: bool) -> None: + self.log.debug('message', payload=payload.hex() if isBinary else payload.decode('utf8')) + + try: + request = RequestWrapper.parse_raw_request(payload) + self._handle_request(request) + except ValidationError as error: + self.send_invalid_request_response(InvalidRequestType.VALIDATION_ERROR, payload, str(error)) + except InvalidRequestError as error: + self.send_invalid_request_response(error.type, payload) + + def 
_handle_request(self, request: Request) -> None: + # This could be a pattern match in Python 3.10 + request_type = type(request) + handlers: Dict[Type, Callable] = { + StartStreamRequest: self._handle_start_stream_request, + AckRequest: self._handle_ack_request, + StopStreamRequest: lambda _: self._handle_stop_stream_request() + } + handle_fn = handlers.get(request_type) + + assert handle_fn is not None, f'cannot handle request of unknown type "{request_type}"' + + handle_fn(request) + + def _handle_start_stream_request(self, request: StartStreamRequest) -> None: + if self._stream_is_active: + raise InvalidRequestError(InvalidRequestType.STREAM_IS_ACTIVE) + + self._validate_ack(request.last_ack_event_id) + + self._last_sent_event_id = request.last_ack_event_id + self._ack_event_id = request.last_ack_event_id + self._window_size = request.window_size + self._stream_is_active = True + + self.factory.send_next_event_to_connection(self) + + def _handle_ack_request(self, request: AckRequest) -> None: + if not self._stream_is_active: + raise InvalidRequestError(InvalidRequestType.STREAM_IS_INACTIVE) + + self._validate_ack(request.ack_event_id) + + self._ack_event_id = request.ack_event_id + self._window_size = request.window_size + + self.factory.send_next_event_to_connection(self) + + def _handle_stop_stream_request(self) -> None: + if not self._stream_is_active: + raise InvalidRequestError(InvalidRequestType.STREAM_IS_INACTIVE) + + self._stream_is_active = False + + def _validate_ack(self, ack_event_id: Optional[int]) -> None: + """Validates an ack_event_id from a request. + + The ack_event_id can't be smaller than the last ack we've received + and can't be larger than the last event we've sent. 
+ """ + if self._ack_event_id is not None and ( + ack_event_id is None or ack_event_id < self._ack_event_id + ): + raise InvalidRequestError(InvalidRequestType.ACK_TOO_SMALL) + + if ack_event_id is not None and ( + self._last_sent_event_id is None or self._last_sent_event_id < ack_event_id + ): + raise InvalidRequestError(InvalidRequestType.ACK_TOO_LARGE) + + def send_event_response(self, event_response: EventResponse) -> None: + self._send_response(event_response) + self._last_sent_event_id = event_response.event.id + + def send_invalid_request_response( + self, + _type: InvalidRequestType, + invalid_payload: Optional[bytes] = None, + error_message: Optional[str] = None + ) -> None: + invalid_request = None if invalid_payload is None else invalid_payload.decode('utf8') + response = InvalidRequestResponse( + type=_type, + invalid_request=invalid_request, + error_message=error_message + ) + + self._send_response(response) + + def _send_response(self, response: Response) -> None: + payload = json_dumpb(response.dict()) + + try: + self.sendMessage(payload) + except Disconnected: + # Connection is closed. Nothing to do. + pass + # XXX: unfortunately autobahn can raise 3 different exceptions and one of them is a bare Exception + # https://github.com/crossbario/autobahn-python/blob/v20.12.3/autobahn/websocket/protocol.py#L2201-L2294 + except Exception: + self.log.error('send failed, moving on', exc_info=True) + + +class InvalidRequestError(Exception): + def __init__(self, _type: InvalidRequestType): + self.type = _type diff --git a/hathor/event/websocket/request.py b/hathor/event/websocket/request.py new file mode 100644 index 000000000..c4c5efd04 --- /dev/null +++ b/hathor/event/websocket/request.py @@ -0,0 +1,68 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import Literal, Optional, Union
+
+from pydantic import NonNegativeInt
+
+from hathor.utils.pydantic import BaseModel
+
+
+class StartStreamRequest(BaseModel):
+    """Class that represents a client request to start streaming events.
+
+    Args:
+        type: The type of the request.
+        last_ack_event_id: The ID of the last event acknowledged by the client.
+        window_size: The amount of events the client is able to process.
+    """
+    type: Literal['START_STREAM']
+    last_ack_event_id: Optional[NonNegativeInt]
+    window_size: NonNegativeInt
+
+
+class AckRequest(BaseModel):
+    """Class that represents a client request to ack an event and change the window size.
+
+    Args:
+        type: The type of the request.
+        ack_event_id: The ID of the last event acknowledged by the client.
+        window_size: The amount of events the client is able to process.
+    """
+    type: Literal['ACK']
+    ack_event_id: NonNegativeInt
+    window_size: NonNegativeInt
+
+
+class StopStreamRequest(BaseModel):
+    """Class that represents a client request to stop streaming events.
+
+    Args:
+        type: The type of the request.
+ """ + type: Literal['STOP_STREAM'] + + +# This could be more performatic in Python 3.9: +# Request = Annotated[StartStreamRequest | AckRequest | StopStreamRequest, Field(discriminator='type')] +Request = Union[StartStreamRequest, AckRequest, StopStreamRequest] + + +class RequestWrapper(BaseModel): + """Class that wraps the Request union type for parsing.""" + __root__: Request + + @classmethod + def parse_raw_request(cls, raw: bytes) -> Request: + return cls.parse_raw(raw).__root__ diff --git a/hathor/event/websocket/response.py b/hathor/event/websocket/response.py new file mode 100644 index 000000000..618de54a6 --- /dev/null +++ b/hathor/event/websocket/response.py @@ -0,0 +1,61 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from enum import Enum +from typing import Optional + +from pydantic import Field, NonNegativeInt + +from hathor.event.model.base_event import BaseEvent +from hathor.utils.pydantic import BaseModel + + +class Response(BaseModel): + pass + + +class EventResponse(Response): + """Class that represents an event to be sent to the client. + + Args: + type: The type of the response. + event: The event. + latest_event_id: The ID of the latest event known by the server. 
+ """ + + type: str = Field(default='EVENT', const=True) + event: BaseEvent + latest_event_id: NonNegativeInt + + +class InvalidRequestType(Enum): + EVENT_WS_NOT_RUNNING = 'EVENT_WS_NOT_RUNNING' + STREAM_IS_ACTIVE = 'STREAM_IS_ACTIVE' + STREAM_IS_INACTIVE = 'STREAM_IS_INACTIVE' + VALIDATION_ERROR = 'VALIDATION_ERROR' + ACK_TOO_SMALL = 'ACK_TOO_SMALL' + ACK_TOO_LARGE = 'ACK_TOO_LARGE' + + +class InvalidRequestResponse(Response, use_enum_values=True): + """Class to let the client know that it performed an invalid request. + + Args: + type: The type of the response. + invalid_request: The request that was invalid. + error_message: A message describing why the request was invalid. + """ + + type: InvalidRequestType + invalid_request: Optional[str] + error_message: Optional[str] diff --git a/hathor/indexes/address_index.py b/hathor/indexes/address_index.py index ebf848ada..73c8da1ef 100644 --- a/hathor/indexes/address_index.py +++ b/hathor/indexes/address_index.py @@ -17,24 +17,51 @@ from structlog import get_logger -from hathor.indexes.base_index import BaseIndex +from hathor.indexes.scope import Scope +from hathor.indexes.tx_group_index import TxGroupIndex +from hathor.pubsub import HathorEvents from hathor.transaction import BaseTransaction if TYPE_CHECKING: # pragma: no cover - from hathor.pubsub import PubSubManager + from hathor.pubsub import EventArguments, PubSubManager logger = get_logger() +SCOPE = Scope( + include_blocks=True, + include_txs=True, + include_voided=True, +) -class AddressIndex(BaseIndex): + +class AddressIndex(TxGroupIndex[str]): """ Index of inputs/outputs by address """ pubsub: Optional['PubSubManager'] + def get_scope(self) -> Scope: + return SCOPE + def init_loop_step(self, tx: BaseTransaction) -> None: self.add_tx(tx) - def publish_tx(self, tx: BaseTransaction, *, addresses: Optional[Iterable[str]] = None) -> None: + def _handle_tx_event(self, key: HathorEvents, args: 'EventArguments') -> None: + """ This method is called when pubsub 
publishes an event that we subscribed to + """ + data = args.__dict__ + tx = data['tx'] + meta = tx.get_metadata() + if meta.has_voided_by_changed_since_last_call() or meta.has_spent_by_changed_since_last_call(): + self._publish_tx(tx) + + def _subscribe_pubsub_events(self) -> None: + """ Subscribe wallet index to receive voided/winner tx pubsub events + """ + assert self.pubsub is not None + # Subscribe to voided/winner events + self.pubsub.subscribe(HathorEvents.CONSENSUS_TX_UPDATE, self._handle_tx_event) + + def _publish_tx(self, tx: BaseTransaction, *, addresses: Optional[Iterable[str]] = None) -> None: + """ Publish WALLET_ADDRESS_HISTORY for all addresses of a transaction. """ + from hathor.pubsub import HathorEvents diff --git a/hathor/indexes/base_index.py b/hathor/indexes/base_index.py index 9503c673f..b68fc8b17 100644 --- a/hathor/indexes/base_index.py +++ b/hathor/indexes/base_index.py @@ -15,6 +15,7 @@ from abc import ABC, abstractmethod from typing import TYPE_CHECKING, Optional +from hathor.indexes.scope import Scope from hathor.transaction.base_transaction import BaseTransaction if TYPE_CHECKING: # pragma: no cover @@ -35,6 +36,11 @@ def init_start(self, indexes_manager: 'IndexesManager') -> None: """ pass + @abstractmethod + def get_scope(self) -> Scope: + """ Returns the scope of interest of this index; whether the scope is configurable is up to the index.""" + raise NotImplementedError + @abstractmethod def get_db_name(self) -> Optional[str]: """ The returned string is used to generate the relevant attributes for storing an indexe's state in the db. 
diff --git a/hathor/indexes/deps_index.py b/hathor/indexes/deps_index.py index 0b7d07276..81362deac 100644 --- a/hathor/indexes/deps_index.py +++ b/hathor/indexes/deps_index.py @@ -16,6 +16,7 @@ from typing import TYPE_CHECKING, Iterator, List from hathor.indexes.base_index import BaseIndex +from hathor.indexes.scope import Scope from hathor.transaction import BaseTransaction, Block if TYPE_CHECKING: # pragma: no cover @@ -25,6 +26,13 @@ # XXX: this arbitrary height limit must fit in a u32 (4-bytes unsigned), so it can be stored easily on rocksdb INF_HEIGHT: int = 2**32 - 1 +SCOPE = Scope( + include_blocks=True, + include_txs=True, + include_voided=True, + include_partial=True +) + def get_requested_from_height(tx: BaseTransaction) -> int: """Return the height of the block that requested (directly or indirectly) the download of this transaction. @@ -105,6 +113,9 @@ class DepsIndex(BaseIndex): them. """ + def get_scope(self) -> Scope: + return SCOPE + def init_loop_step(self, tx: BaseTransaction) -> None: tx_meta = tx.get_metadata() if tx_meta.voided_by: diff --git a/hathor/indexes/height_index.py b/hathor/indexes/height_index.py index 655fe7e70..2a62cfc2c 100644 --- a/hathor/indexes/height_index.py +++ b/hathor/indexes/height_index.py @@ -16,10 +16,17 @@ from typing import List, NamedTuple, Optional, Tuple from hathor.indexes.base_index import BaseIndex +from hathor.indexes.scope import Scope from hathor.transaction import BaseTransaction, Block from hathor.transaction.genesis import BLOCK_GENESIS from hathor.util import not_none +SCOPE = Scope( + include_blocks=True, + include_txs=False, + include_voided=True, +) + class IndexEntry(NamedTuple): """Helper named tuple that implementations can use.""" @@ -40,6 +47,9 @@ class HeightIndex(BaseIndex): """Store the block hash for each given height """ + def get_scope(self) -> Scope: + return SCOPE + def init_loop_step(self, tx: BaseTransaction) -> None: if not tx.is_block: return diff --git a/hathor/indexes/info_index.py 
b/hathor/indexes/info_index.py index 96c200eed..e69edbb0f 100644 --- a/hathor/indexes/info_index.py +++ b/hathor/indexes/info_index.py @@ -17,10 +17,19 @@ from structlog import get_logger from hathor.indexes.base_index import BaseIndex +from hathor.indexes.scope import Scope from hathor.transaction import BaseTransaction logger = get_logger() +SCOPE = Scope( + include_blocks=True, + include_txs=True, + include_voided=True, + # XXX: this index doesn't care about the ordering + topological_order=False, +) + class InfoIndex(BaseIndex): """ Index of general information about the storage @@ -30,6 +39,9 @@ def init_loop_step(self, tx: BaseTransaction) -> None: self.update_timestamps(tx) self.update_counts(tx) + def get_scope(self) -> Scope: + return SCOPE + @abstractmethod def update_timestamps(self, tx: BaseTransaction) -> None: raise NotImplementedError diff --git a/hathor/indexes/manager.py b/hathor/indexes/manager.py index bb4ccfe37..d6899f6a2 100644 --- a/hathor/indexes/manager.py +++ b/hathor/indexes/manager.py @@ -12,9 +12,10 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+import operator from abc import ABC, abstractmethod -from enum import Enum, auto -from typing import TYPE_CHECKING, Iterator, List, Optional, Tuple +from functools import reduce +from typing import TYPE_CHECKING, Iterator, List, Optional from structlog import get_logger @@ -24,8 +25,8 @@ from hathor.indexes.height_index import HeightIndex from hathor.indexes.info_index import InfoIndex from hathor.indexes.mempool_tips_index import MempoolTipsIndex -from hathor.indexes.timestamp_index import TimestampIndex -from hathor.indexes.tips_index import TipsIndex +from hathor.indexes.timestamp_index import ScopeType as TimestampScopeType, TimestampIndex +from hathor.indexes.tips_index import ScopeType as TipsScopeType, TipsIndex from hathor.indexes.tokens_index import TokensIndex from hathor.indexes.utxo_index import UtxoIndex from hathor.transaction import BaseTransaction @@ -42,12 +43,6 @@ MAX_CACHE_SIZE_DURING_LOAD = 1000 -class _IndexFilter(Enum): - ALL = auto() # block or tx, voided or not - ALL_BLOCKS = auto() # only blocks that are not voided - VALID_TXS = auto() # only transactions that are not voided - - class IndexesManager(ABC): """ IndexesManager manages all the indexes that we will have in the system @@ -87,29 +82,21 @@ def __init_checks__(self): def iter_all_indexes(self) -> Iterator[BaseIndex]: """ Iterate over all of the indexes abstracted by this manager, hiding their specific implementation details""" - for _, index in self._iter_all_indexes_with_filter(): - yield index - - def _iter_all_indexes_with_filter(self) -> Iterator[Tuple[_IndexFilter, BaseIndex]]: - """ Same as `iter_all_indexes()`, but includes a filter for what transactions an index is interested in.""" - yield _IndexFilter.ALL, self.info - yield _IndexFilter.ALL, self.all_tips - yield _IndexFilter.ALL_BLOCKS, self.block_tips - yield _IndexFilter.VALID_TXS, self.tx_tips - yield _IndexFilter.ALL, self.sorted_all - yield _IndexFilter.ALL_BLOCKS, self.sorted_blocks - yield _IndexFilter.VALID_TXS, 
self.sorted_txs - yield _IndexFilter.ALL, self.height - if self.deps is not None: - yield _IndexFilter.ALL, self.deps - if self.mempool_tips is not None: - yield _IndexFilter.ALL, self.mempool_tips - if self.addresses is not None: - yield _IndexFilter.ALL, self.addresses - if self.tokens is not None: - yield _IndexFilter.ALL, self.tokens - if self.utxo is not None: - yield _IndexFilter.ALL, self.utxo + return filter(None, [ + self.info, + self.all_tips, + self.block_tips, + self.tx_tips, + self.sorted_all, + self.sorted_blocks, + self.sorted_txs, + self.height, + self.deps, + self.mempool_tips, + self.addresses, + self.tokens, + self.utxo, + ]) @abstractmethod def enable_address_index(self, pubsub: 'PubSubManager') -> None: @@ -149,24 +136,23 @@ def _manually_initialize(self, tx_storage: 'TransactionStorage') -> None: db_last_started_at = tx_storage.get_last_started_at() - indexes_to_init: List[Tuple[_IndexFilter, BaseIndex]] = [] - for index_filter, index in self._iter_all_indexes_with_filter(): + indexes_to_init: List[BaseIndex] = [] + for index in self.iter_all_indexes(): index_db_name = index.get_db_name() if index_db_name is None: - indexes_to_init.append((index_filter, index)) + indexes_to_init.append(index) continue index_last_started_at = tx_storage.get_index_last_started_at(index_db_name) if db_last_started_at != index_last_started_at: - indexes_to_init.append((index_filter, index)) + indexes_to_init.append(index) if indexes_to_init: - self.log.info('there are indexes that need initialization', - indexes_to_init=[i for _, i in indexes_to_init]) + self.log.info('there are indexes that need initialization', indexes_to_init=indexes_to_init) else: self.log.info('there are no indexes that need initialization') # make sure that all the indexes that we're rebuilding are cleared - for _, index in indexes_to_init: + for index in indexes_to_init: index_db_name = index.get_db_name() if index_db_name: tx_storage.set_index_last_started_at(index_db_name, 
NULL_INDEX_LAST_STARTED_AT) @@ -184,27 +170,20 @@ def _manually_initialize(self, tx_storage: 'TransactionStorage') -> None: for index in self.iter_all_indexes(): index.init_start(self) - self.log.debug('indexes init') if indexes_to_init: - tx_iter = progress(tx_storage.topological_iterator(), log=self.log, total=tx_storage.get_vertices_count()) + overall_scope = reduce(operator.__or__, map(lambda i: i.get_scope(), indexes_to_init)) + tx_iter_inner = overall_scope.get_iterator(tx_storage) + tx_iter = progress(tx_iter_inner, log=self.log, total=tx_storage.get_vertices_count()) + self.log.debug('indexes init', scope=overall_scope) else: tx_iter = iter([]) - for tx in tx_iter: + self.log.debug('indexes init') - tx_meta = tx.get_metadata() + for tx in tx_iter: # feed each transaction to the indexes that they are interested in - for index_filter, index in indexes_to_init: - if index_filter is _IndexFilter.ALL: + for index in indexes_to_init: + if index.get_scope().matches(tx): index.init_loop_step(tx) - elif index_filter is _IndexFilter.ALL_BLOCKS: - if tx.is_block: - index.init_loop_step(tx) - elif index_filter is _IndexFilter.VALID_TXS: - # XXX: all indexes that use this filter treat soft-voided as voided, nothing special needed - if tx.is_transaction and not tx_meta.voided_by: - index.init_loop_step(tx) - else: - assert False, 'impossible filter' # Restore cache capacity. 
if isinstance(tx_storage, TransactionCacheStorage): @@ -307,13 +286,13 @@ def __init__(self) -> None: from hathor.indexes.memory_tips_index import MemoryTipsIndex self.info = MemoryInfoIndex() - self.all_tips = MemoryTipsIndex() - self.block_tips = MemoryTipsIndex() - self.tx_tips = MemoryTipsIndex() + self.all_tips = MemoryTipsIndex(scope_type=TipsScopeType.ALL) + self.block_tips = MemoryTipsIndex(scope_type=TipsScopeType.BLOCKS) + self.tx_tips = MemoryTipsIndex(scope_type=TipsScopeType.TXS) - self.sorted_all = MemoryTimestampIndex() - self.sorted_blocks = MemoryTimestampIndex() - self.sorted_txs = MemoryTimestampIndex() + self.sorted_all = MemoryTimestampIndex(scope_type=TimestampScopeType.ALL) + self.sorted_blocks = MemoryTimestampIndex(scope_type=TimestampScopeType.BLOCKS) + self.sorted_txs = MemoryTimestampIndex(scope_type=TimestampScopeType.TXS) self.addresses = None self.tokens = None @@ -362,13 +341,13 @@ def __init__(self, db: 'rocksdb.DB') -> None: self.info = RocksDBInfoIndex(self._db) self.height = RocksDBHeightIndex(self._db) - self.all_tips = PartialRocksDBTipsIndex(self._db, 'all') - self.block_tips = PartialRocksDBTipsIndex(self._db, 'blocks') - self.tx_tips = PartialRocksDBTipsIndex(self._db, 'txs') + self.all_tips = PartialRocksDBTipsIndex(self._db, scope_type=TipsScopeType.ALL) + self.block_tips = PartialRocksDBTipsIndex(self._db, scope_type=TipsScopeType.BLOCKS) + self.tx_tips = PartialRocksDBTipsIndex(self._db, scope_type=TipsScopeType.TXS) - self.sorted_all = RocksDBTimestampIndex(self._db, 'all') - self.sorted_blocks = RocksDBTimestampIndex(self._db, 'blocks') - self.sorted_txs = RocksDBTimestampIndex(self._db, 'txs') + self.sorted_all = RocksDBTimestampIndex(self._db, scope_type=TimestampScopeType.ALL) + self.sorted_blocks = RocksDBTimestampIndex(self._db, scope_type=TimestampScopeType.BLOCKS) + self.sorted_txs = RocksDBTimestampIndex(self._db, scope_type=TimestampScopeType.TXS) self.addresses = None self.tokens = None diff --git 
a/hathor/indexes/memory_address_index.py b/hathor/indexes/memory_address_index.py index f1cf3009b..3ab6b14f8 100644 --- a/hathor/indexes/memory_address_index.py +++ b/hathor/indexes/memory_address_index.py @@ -12,84 +12,45 @@ # See the License for the specific language governing permissions and # limitations under the License. -from collections import defaultdict -from typing import TYPE_CHECKING, DefaultDict, List, Optional, Set +from typing import TYPE_CHECKING, Iterable, List, Optional from structlog import get_logger from hathor.indexes.address_index import AddressIndex -from hathor.pubsub import HathorEvents +from hathor.indexes.memory_tx_group_index import MemoryTxGroupIndex from hathor.transaction import BaseTransaction if TYPE_CHECKING: # pragma: no cover - from hathor.pubsub import EventArguments, PubSubManager + from hathor.pubsub import PubSubManager logger = get_logger() -class MemoryAddressIndex(AddressIndex): +class MemoryAddressIndex(MemoryTxGroupIndex[str], AddressIndex): """ Index of inputs/outputs by address """ - index: DefaultDict[str, Set[bytes]] - def __init__(self, pubsub: Optional['PubSubManager'] = None) -> None: + super().__init__() self.pubsub = pubsub - self.force_clear() if self.pubsub: - self.subscribe_pubsub_events() + self._subscribe_pubsub_events() def get_db_name(self) -> Optional[str]: return None - def force_clear(self) -> None: - self.index = defaultdict(set) - - def subscribe_pubsub_events(self) -> None: - """ Subscribe wallet index to receive voided/winner tx pubsub events - """ - assert self.pubsub is not None - # Subscribe to voided/winner events - self.pubsub.subscribe(HathorEvents.CONSENSUS_TX_UPDATE, self.handle_tx_event) + def _extract_keys(self, tx: BaseTransaction) -> Iterable[str]: + return tx.get_related_addresses() def add_tx(self, tx: BaseTransaction) -> None: - """ Add tx inputs and outputs to the wallet index (indexed by its addresses). 
- """ - assert tx.hash is not None - - addresses = tx.get_related_addresses() - for address in addresses: - self.index[address].add(tx.hash) - - self.publish_tx(tx, addresses=addresses) - - def remove_tx(self, tx: BaseTransaction) -> None: - """ Remove tx inputs and outputs from the wallet index (indexed by its addresses). - """ - assert tx.hash is not None - - addresses = tx.get_related_addresses() - for address in addresses: - self.index[address].discard(tx.hash) - - def handle_tx_event(self, key: HathorEvents, args: 'EventArguments') -> None: - """ This method is called when pubsub publishes an event that we subscribed - """ - data = args.__dict__ - tx = data['tx'] - meta = tx.get_metadata() - if meta.has_voided_by_changed_since_last_call() or meta.has_spent_by_changed_since_last_call(): - self.publish_tx(tx) + super().add_tx(tx) + self._publish_tx(tx) def get_from_address(self, address: str) -> List[bytes]: - """ Get list of transaction hashes of an address - """ - return list(self.index[address]) + return list(self._get_from_key(address)) def get_sorted_from_address(self, address: str) -> List[bytes]: - """ Get a sorted list of transaction hashes of an address - """ - return sorted(self.index[address]) + return list(self._get_sorted_from_key(address)) def is_address_empty(self, address: str) -> bool: - return not bool(self.index[address]) + return self._is_key_empty(address) diff --git a/hathor/indexes/memory_timestamp_index.py b/hathor/indexes/memory_timestamp_index.py index d61e32677..523e1bb3e 100644 --- a/hathor/indexes/memory_timestamp_index.py +++ b/hathor/indexes/memory_timestamp_index.py @@ -17,7 +17,7 @@ from sortedcontainers import SortedKeyList from structlog import get_logger -from hathor.indexes.timestamp_index import RangeIdx, TimestampIndex +from hathor.indexes.timestamp_index import RangeIdx, ScopeType, TimestampIndex from hathor.indexes.utils import ( TransactionIndexElement, get_newer_sorted_key_list, @@ -35,7 +35,8 @@ class 
MemoryTimestampIndex(TimestampIndex): _index: 'SortedKeyList[TransactionIndexElement]' - def __init__(self) -> None: + def __init__(self, *, scope_type: ScopeType): + super().__init__(scope_type=scope_type) self.log = logger.new() self.force_clear() diff --git a/hathor/indexes/memory_tips_index.py b/hathor/indexes/memory_tips_index.py index 46ff14f61..b8b8c6310 100644 --- a/hathor/indexes/memory_tips_index.py +++ b/hathor/indexes/memory_tips_index.py @@ -18,7 +18,7 @@ from intervaltree import Interval, IntervalTree from structlog import get_logger -from hathor.indexes.tips_index import TipsIndex +from hathor.indexes.tips_index import ScopeType, TipsIndex from hathor.transaction import BaseTransaction logger = get_logger() @@ -47,7 +47,8 @@ class MemoryTipsIndex(TipsIndex): # It is useful because the interval tree allows access only by the interval. tx_last_interval: Dict[bytes, Interval] - def __init__(self) -> None: + def __init__(self, *, scope_type: ScopeType): + super().__init__(scope_type=scope_type) self.log = logger.new() self.tree = IntervalTree() self.tx_last_interval = {} diff --git a/hathor/indexes/memory_tx_group_index.py b/hathor/indexes/memory_tx_group_index.py new file mode 100644 index 000000000..752e04762 --- /dev/null +++ b/hathor/indexes/memory_tx_group_index.py @@ -0,0 +1,69 @@ +# Copyright 2021 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from abc import abstractmethod +from collections import defaultdict +from typing import DefaultDict, Iterable, Set, Sized, TypeVar + +from structlog import get_logger + +from hathor.indexes.tx_group_index import TxGroupIndex +from hathor.transaction import BaseTransaction +from hathor.util import not_none + +logger = get_logger() + +KT = TypeVar('KT', bound=Sized) + + +class MemoryTxGroupIndex(TxGroupIndex[KT]): + """Memory implementation of the TxGroupIndex. This class is abstract and cannot be used directly. + """ + + index: DefaultDict[KT, Set[bytes]] + + def __init__(self) -> None: + self.force_clear() + + def force_clear(self) -> None: + self.index = defaultdict(set) + + def _add_tx(self, key: KT, tx: BaseTransaction) -> None: + self.index[key].add(not_none(tx.hash)) + + @abstractmethod + def _extract_keys(self, tx: BaseTransaction) -> Iterable[KT]: + """Extract the keys related to a given tx. The transaction will be added to all extracted keys.""" + raise NotImplementedError + + def add_tx(self, tx: BaseTransaction) -> None: + assert tx.hash is not None + + for key in self._extract_keys(tx): + self._add_tx(key, tx) + + def remove_tx(self, tx: BaseTransaction) -> None: + assert tx.hash is not None + + for key in self._extract_keys(tx): + self.index[key].discard(tx.hash) + + def _get_from_key(self, key: KT) -> Iterable[bytes]: + yield from self.index[key] + + def _get_sorted_from_key(self, key: KT) -> Iterable[bytes]: + return sorted(self.index[key]) + + def _is_key_empty(self, key: KT) -> bool: + return not bool(self.index[key]) diff --git a/hathor/indexes/mempool_tips_index.py b/hathor/indexes/mempool_tips_index.py index 9c322fcd8..784327f69 100644 --- a/hathor/indexes/mempool_tips_index.py +++ b/hathor/indexes/mempool_tips_index.py @@ -19,16 +19,26 @@ import structlog from hathor.indexes.base_index import BaseIndex +from hathor.indexes.scope import Scope from hathor.transaction import BaseTransaction, Transaction from hathor.util import not_none if 
TYPE_CHECKING: # pragma: no cover from hathor.transaction.storage import TransactionStorage +SCOPE = Scope( + include_blocks=True, + include_txs=True, + include_voided=True, +) + class MempoolTipsIndex(BaseIndex): """Index to access the tips of the mempool transactions, which haven't been confirmed by a block.""" + def get_scope(self) -> Scope: + return SCOPE + def init_loop_step(self, tx: BaseTransaction) -> None: self.update(tx) diff --git a/hathor/indexes/partial_rocksdb_tips_index.py b/hathor/indexes/partial_rocksdb_tips_index.py index 8c59f3cb3..b41252d11 100644 --- a/hathor/indexes/partial_rocksdb_tips_index.py +++ b/hathor/indexes/partial_rocksdb_tips_index.py @@ -22,6 +22,7 @@ from hathor.indexes.memory_tips_index import MemoryTipsIndex from hathor.indexes.rocksdb_utils import RocksDBIndexUtils +from hathor.indexes.tips_index import ScopeType from hathor.util import LogDuration if TYPE_CHECKING: # pragma: no cover @@ -111,11 +112,11 @@ class PartialRocksDBTipsIndex(MemoryTipsIndex, RocksDBIndexUtils): # It is useful because the interval tree allows access only by the interval. 
tx_last_interval: Dict[bytes, Interval] - def __init__(self, db: 'rocksdb.DB', name: str) -> None: - MemoryTipsIndex.__init__(self) + def __init__(self, db: 'rocksdb.DB', *, scope_type: ScopeType): + MemoryTipsIndex.__init__(self, scope_type=scope_type) + self._name = scope_type.get_name() self.log = logger.new() # XXX: override MemoryTipsIndex logger so it shows the correct module - RocksDBIndexUtils.__init__(self, db, f'tips-{name}'.encode()) - self._name = name + RocksDBIndexUtils.__init__(self, db, f'tips-{self._name}'.encode()) def get_db_name(self) -> Optional[str]: return f'tips_{self._name}' diff --git a/hathor/indexes/rocksdb_address_index.py b/hathor/indexes/rocksdb_address_index.py index 88ecf3357..74f978fa7 100644 --- a/hathor/indexes/rocksdb_address_index.py +++ b/hathor/indexes/rocksdb_address_index.py @@ -12,19 +12,19 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import TYPE_CHECKING, Iterable, List, Optional, Tuple +from typing import TYPE_CHECKING, Iterable, List, Optional from structlog import get_logger from hathor.indexes.address_index import AddressIndex +from hathor.indexes.rocksdb_tx_group_index import RocksDBTxGroupIndex from hathor.indexes.rocksdb_utils import RocksDBIndexUtils -from hathor.pubsub import HathorEvents from hathor.transaction import BaseTransaction if TYPE_CHECKING: # pragma: no cover import rocksdb - from hathor.pubsub import EventArguments, PubSubManager + from hathor.pubsub import PubSubManager logger = get_logger() @@ -32,130 +32,42 @@ _DB_NAME: str = 'address' -class RocksDBAddressIndex(AddressIndex, RocksDBIndexUtils): +class RocksDBAddressIndex(RocksDBTxGroupIndex[str], AddressIndex, RocksDBIndexUtils): """ Index of inputs/outputs by address. 
+ """ - This index uses rocksdb and the following key format: - - key = [address][tx.timestamp][tx.hash] - |--34b--||--4 bytes---||--32b--| - - It works nicely because rocksdb uses a tree sorted by key under the hood. + _KEY_SIZE = 34 - The timestamp must be serialized in big-endian, so ts1 > ts2 implies that bytes(ts1) > bytes(ts2), - hence the transactions are sorted by timestamp. - """ def __init__(self, db: 'rocksdb.DB', *, cf_name: Optional[bytes] = None, pubsub: Optional['PubSubManager'] = None) -> None: - self.log = logger.new() - RocksDBIndexUtils.__init__(self, db, cf_name or _CF_NAME_ADDRESS_INDEX) + RocksDBTxGroupIndex.__init__(self, db, cf_name or _CF_NAME_ADDRESS_INDEX) self.pubsub = pubsub if self.pubsub: - self.subscribe_pubsub_events() + self._subscribe_pubsub_events() + + def _serialize_key(self, key: str) -> bytes: + return key.encode('ascii') + + def _deserialize_key(self, key_bytes: bytes) -> str: + return key_bytes.decode('ascii') + + def _extract_keys(self, tx: BaseTransaction) -> Iterable[str]: + return tx.get_related_addresses() def get_db_name(self) -> Optional[str]: # XXX: we don't need it to be parametrizable, so this is fine return _DB_NAME - def force_clear(self) -> None: - self.clear() - - def _to_key(self, address: str, tx: Optional[BaseTransaction] = None) -> bytes: - import struct - assert len(address) == 34 - key = address.encode('ascii') - if tx: - assert tx.hash is not None - assert len(tx.hash) == 32 - key += struct.pack('>I', tx.timestamp) + tx.hash - assert len(key) == 34 + 4 + 32 - return key - - def _from_key(self, key: bytes) -> Tuple[str, int, bytes]: - import struct - assert len(key) == 34 + 4 + 32 - address = key[:34].decode('ascii') - timestamp: int - (timestamp,) = struct.unpack('>I', key[34:38]) - tx_hash = key[38:] - assert len(address) == 34 - assert len(tx_hash) == 32 - return address, timestamp, tx_hash - - def subscribe_pubsub_events(self) -> None: - """ Subscribe wallet index to receive voided/winner tx pubsub 
events - """ - assert self.pubsub is not None - # Subscribe to voided/winner events - self.pubsub.subscribe(HathorEvents.CONSENSUS_TX_UPDATE, self.handle_tx_event) - def add_tx(self, tx: BaseTransaction) -> None: - """ Add tx inputs and outputs to the wallet index (indexed by its addresses). - """ - assert tx.hash is not None - - addresses = tx.get_related_addresses() - for address in addresses: - self.log.debug('put address', address=address) - self._db.put((self._cf, self._to_key(address, tx)), b'') - - self.publish_tx(tx, addresses=addresses) - - def remove_tx(self, tx: BaseTransaction) -> None: - """ Remove tx inputs and outputs from the wallet index (indexed by its addresses). - """ - assert tx.hash is not None - - addresses = tx.get_related_addresses() - for address in addresses: - self.log.debug('delete address', address=address) - self._db.delete((self._cf, self._to_key(address, tx))) - - def handle_tx_event(self, key: HathorEvents, args: 'EventArguments') -> None: - """ This method is called when pubsub publishes an event that we subscribed - """ - data = args.__dict__ - tx = data['tx'] - meta = tx.get_metadata() - if meta.has_voided_by_changed_since_last_call() or meta.has_spent_by_changed_since_last_call(): - self.publish_tx(tx) - - def _get_from_address_iter(self, address: str) -> Iterable[bytes]: - self.log.debug('seek to', address=address) - it = self._db.iterkeys(self._cf) - it.seek(self._to_key(address)) - for _cf, key in it: - addr, _, tx_hash = self._from_key(key) - if addr != address: - break - self.log.debug('seek found', tx=tx_hash.hex()) - yield tx_hash - self.log.debug('seek end') + super().add_tx(tx) + self._publish_tx(tx) def get_from_address(self, address: str) -> List[bytes]: - """ Get list of transaction hashes of an address - """ - return list(self._get_from_address_iter(address)) + return list(self._get_from_key(address)) def get_sorted_from_address(self, address: str) -> List[bytes]: - """ Get a sorted list of transaction hashes of an 
address - """ - return list(self._get_from_address_iter(address)) + return list(self._get_sorted_from_key(address)) def is_address_empty(self, address: str) -> bool: - self.log.debug('seek to', address=address) - it = self._db.iterkeys(self._cf) - seek_key = self._to_key(address) - it.seek(seek_key) - cf_key = it.get() - if not cf_key: - return True - _cf, key = cf_key - # XXX: this means we reached the end it did not found any key - if key == seek_key: - return True - addr, _, _ = self._from_key(key) - is_empty = addr != address - self.log.debug('seek empty', is_empty=is_empty) - return is_empty + return self._is_key_empty(address) diff --git a/hathor/indexes/rocksdb_timestamp_index.py b/hathor/indexes/rocksdb_timestamp_index.py index a530b2453..01e4e609b 100644 --- a/hathor/indexes/rocksdb_timestamp_index.py +++ b/hathor/indexes/rocksdb_timestamp_index.py @@ -17,7 +17,7 @@ from structlog import get_logger from hathor.indexes.rocksdb_utils import RocksDBIndexUtils, incr_key -from hathor.indexes.timestamp_index import RangeIdx, TimestampIndex +from hathor.indexes.timestamp_index import RangeIdx, ScopeType, TimestampIndex from hathor.transaction import BaseTransaction from hathor.util import collect_n, skip_n @@ -38,10 +38,11 @@ class RocksDBTimestampIndex(TimestampIndex, RocksDBIndexUtils): It works nicely because rocksdb uses a tree sorted by key under the hood. 
""" - def __init__(self, db: 'rocksdb.DB', name: str) -> None: + def __init__(self, db: 'rocksdb.DB', *, scope_type: ScopeType): + TimestampIndex.__init__(self, scope_type=scope_type) + self._name = scope_type.get_name() self.log = logger.new() - RocksDBIndexUtils.__init__(self, db, f'timestamp-sorted-{name}'.encode()) - self._name = name + RocksDBIndexUtils.__init__(self, db, f'timestamp-sorted-{self._name}'.encode()) def get_db_name(self) -> Optional[str]: return f'timestamp_{self._name}' diff --git a/hathor/indexes/rocksdb_tx_group_index.py b/hathor/indexes/rocksdb_tx_group_index.py new file mode 100644 index 000000000..1706eb3b1 --- /dev/null +++ b/hathor/indexes/rocksdb_tx_group_index.py @@ -0,0 +1,140 @@ +# Copyright 2021 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from abc import abstractmethod +from typing import TYPE_CHECKING, Iterable, Optional, Sized, Tuple, TypeVar + +from structlog import get_logger + +from hathor.indexes.rocksdb_utils import RocksDBIndexUtils +from hathor.indexes.tx_group_index import TxGroupIndex +from hathor.transaction import BaseTransaction + +if TYPE_CHECKING: # pragma: no cover + import rocksdb + +logger = get_logger() + +KT = TypeVar('KT', bound=Sized) + + +class RocksDBTxGroupIndex(TxGroupIndex[KT], RocksDBIndexUtils): + """RocksDB implementation of the TxGroupIndex. This class is abstract and cannot be used directly. + + Current implementation requires all keys to have the same size after serialization. 
+ + This index uses rocksdb and the following key format: + + rocksdb_key = [key ][tx.timestamp][tx.hash] + |_KEY_SIZE||--4 bytes---||--32b--| + + It works nicely because rocksdb uses a tree sorted by key under the hood. + + The timestamp must be serialized in big-endian, so ts1 > ts2 implies that bytes(ts1) > bytes(ts2), + hence the transactions are sorted by timestamp. + """ + + _KEY_SIZE: int + _CF_NAME: bytes + + def __init__(self, db: 'rocksdb.DB', cf_name: bytes) -> None: + self.log = logger.new() + RocksDBIndexUtils.__init__(self, db, cf_name) + + def force_clear(self) -> None: + self.clear() + + @abstractmethod + def _serialize_key(self, key: KT) -> bytes: + """Serialize key, so it can be part of RocksDB's key.""" + raise NotImplementedError + + @abstractmethod + def _deserialize_key(self, _bytes: bytes) -> KT: + """Deserialize RocksDB's key.""" + raise NotImplementedError + + @abstractmethod + def _extract_keys(self, tx: BaseTransaction) -> Iterable[KT]: + """Extract the keys related to a given tx. The transaction will be added to all extracted keys.""" + raise NotImplementedError + + def _to_rocksdb_key(self, key: KT, tx: Optional[BaseTransaction] = None) -> bytes: + import struct + rocksdb_key = self._serialize_key(key) + assert len(rocksdb_key) == self._KEY_SIZE + if tx: + assert tx.hash is not None + assert len(tx.hash) == 32 + rocksdb_key += struct.pack('>I', tx.timestamp) + tx.hash + assert len(rocksdb_key) == self._KEY_SIZE + 4 + 32 + return rocksdb_key + + def _from_rocksdb_key(self, rocksdb_key: bytes) -> Tuple[KT, int, bytes]: + import struct + assert len(rocksdb_key) == self._KEY_SIZE + 4 + 32 + key = self._deserialize_key(rocksdb_key[:self._KEY_SIZE]) + timestamp: int + (timestamp,) = struct.unpack('>I', rocksdb_key[self._KEY_SIZE:self._KEY_SIZE + 4]) + tx_hash = rocksdb_key[self._KEY_SIZE + 4:] + # Should we differentiate `_KEY_SIZE` and `_SERIALIZED_KEY_SIZE`? 
+ # assert len(key) == self._KEY_SIZE + assert len(tx_hash) == 32 + return key, timestamp, tx_hash + + def add_tx(self, tx: BaseTransaction) -> None: + assert tx.hash is not None + + for key in self._extract_keys(tx): + self.log.debug('put key', key=key) + self._db.put((self._cf, self._to_rocksdb_key(key, tx)), b'') + + def remove_tx(self, tx: BaseTransaction) -> None: + assert tx.hash is not None + + for key in self._extract_keys(tx): + self.log.debug('delete key', key=key) + self._db.delete((self._cf, self._to_rocksdb_key(key, tx))) + + def _get_from_key(self, key: KT) -> Iterable[bytes]: + self.log.debug('seek to', key=key) + it = self._db.iterkeys(self._cf) + it.seek(self._to_rocksdb_key(key)) + for _cf, rocksdb_key in it: + key2, _, tx_hash = self._from_rocksdb_key(rocksdb_key) + if key2 != key: + break + self.log.debug('seek found', tx=tx_hash.hex()) + yield tx_hash + self.log.debug('seek end') + + def _get_sorted_from_key(self, key: KT) -> Iterable[bytes]: + return self._get_from_key(key) + + def _is_key_empty(self, key: KT) -> bool: + self.log.debug('seek to', key=key) + it = self._db.iterkeys(self._cf) + seek_key = self._to_rocksdb_key(key) + it.seek(seek_key) + cf_key = it.get() + if not cf_key: + return True + _cf, rocksdb_key = cf_key + # XXX: this means we reached the end it did not found any key + if rocksdb_key == seek_key: + return True + key2, _, _ = self._from_rocksdb_key(rocksdb_key) + is_empty = key2 != key + self.log.debug('seek empty', is_empty=is_empty) + return is_empty diff --git a/hathor/indexes/scope.py b/hathor/indexes/scope.py new file mode 100644 index 000000000..0a1e84c35 --- /dev/null +++ b/hathor/indexes/scope.py @@ -0,0 +1,91 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import TYPE_CHECKING, Iterator, NamedTuple + +from hathor.transaction.base_transaction import BaseTransaction + +if TYPE_CHECKING: # pragma: no cover + from hathor.transaction.storage import TransactionStorage + + +class Scope(NamedTuple): + """ This class models the scope of transactions that an index is interested in. + + It is used for both selecting the optimal iterator for all the indexes that need to be initialized and for + filtering which transactions are fed to the index. + """ + include_blocks: bool + include_txs: bool + include_voided: bool + # XXX: these have a default value since it should be really rare to have it different + include_partial: bool = False + topological_order: bool = True # if False than ordering doesn't matter + + # XXX: this is used to join the scope of multiple indexes to get an overall scope that includes everything that + # each individual scope needs, the OR operator was chosen because it represents well the operation of keeping + # a property if either A or B needs it + def __or__(self, other): + # XXX: note that this doesn't necessarily have to be OR operations between properties, we want the operations + # that broaden the scope, and not narrow it. 
+ # XXX: in the case of topological_order, we want to keep the "topological" ordering if any of them requires it, + # so it also is an OR operator + return Scope( + include_blocks=self.include_blocks | other.include_blocks, + include_txs=self.include_txs | other.include_txs, + include_voided=self.include_voided | other.include_voided, + include_partial=self.include_partial | other.include_partial, + topological_order=self.topological_order | other.topological_order, + ) + + def matches(self, tx: BaseTransaction) -> bool: + """ Check if a transaction matches this scope, True means the index is interested in this transaction. + """ + if tx.is_block and not self.include_blocks: + return False + if tx.is_transaction and not self.include_txs: + return False + tx_meta = tx.get_metadata() + if tx_meta.voided_by and not self.include_voided: + return False + if not tx_meta.validation.is_fully_connected() and not self.include_partial: + return False + # XXX: self.topologial_order doesn't affect self.match() + # passed all checks + return True + + def get_iterator(self, tx_storage: 'TransactionStorage') -> Iterator[BaseTransaction]: + """ This method returns an iterator that only yields transaction that match the current scope. 
+ """ + iterator: Iterator[BaseTransaction] + # XXX: this is to mark if the chosen iterator will yield partial transactions + iterator_covers_partial: bool + if self.topological_order: + iterator = tx_storage.topological_iterator() + iterator_covers_partial = False + else: + iterator = tx_storage.get_all_transactions() + iterator_covers_partial = True + for tx in iterator: + if self.matches(tx): + yield tx + if self.include_partial and not iterator_covers_partial: + # if partial transactions are needed and were not already covered, we use get_all_transactions, which + # includes partial transactions, to yield them, skipping all that aren't partial + for tx in tx_storage.get_all_transactions(): + tx_meta = tx.get_metadata() + if tx_meta.validation.is_fully_connected(): + continue + if self.matches(tx): + yield tx diff --git a/hathor/indexes/timestamp_index.py b/hathor/indexes/timestamp_index.py index e2dea623e..a738dfc47 100644 --- a/hathor/indexes/timestamp_index.py +++ b/hathor/indexes/timestamp_index.py @@ -13,16 +13,39 @@ # limitations under the License. from abc import abstractmethod +from enum import Enum from typing import Iterator, List, NamedTuple, Optional, Tuple from structlog import get_logger from hathor.indexes.base_index import BaseIndex +from hathor.indexes.scope import Scope from hathor.transaction import BaseTransaction logger = get_logger() +class ScopeType(Enum): + ALL = Scope( + include_blocks=True, + include_txs=True, + include_voided=True, + ) + TXS = Scope( + include_blocks=False, + include_txs=True, + include_voided=False, + ) + BLOCKS = Scope( + include_blocks=True, + include_txs=False, + include_voided=True, + ) + + def get_name(self) -> str: + return self.name.lower() + + class RangeIdx(NamedTuple): timestamp: int offset: int @@ -32,6 +55,12 @@ class TimestampIndex(BaseIndex): """ Index of transactions sorted by their timestamps. 
""" + def __init__(self, *, scope_type: ScopeType): + self._scope_type = scope_type + + def get_scope(self) -> Scope: + return self._scope_type.value + def init_loop_step(self, tx: BaseTransaction) -> None: self.add_tx(tx) diff --git a/hathor/indexes/tips_index.py b/hathor/indexes/tips_index.py index 1b85cc523..f9fe09c67 100644 --- a/hathor/indexes/tips_index.py +++ b/hathor/indexes/tips_index.py @@ -13,17 +13,40 @@ # limitations under the License. from abc import abstractmethod +from enum import Enum from typing import Set from intervaltree import Interval from structlog import get_logger from hathor.indexes.base_index import BaseIndex +from hathor.indexes.scope import Scope from hathor.transaction import BaseTransaction logger = get_logger() +class ScopeType(Enum): + ALL = Scope( + include_blocks=True, + include_txs=True, + include_voided=True, + ) + TXS = Scope( + include_blocks=False, + include_txs=True, + include_voided=False, + ) + BLOCKS = Scope( + include_blocks=True, + include_txs=False, + include_voided=True, + ) + + def get_name(self) -> str: + return self.name.lower() + + class TipsIndex(BaseIndex): """ Use an interval tree to quick get the tips at a given timestamp. @@ -38,6 +61,12 @@ class TipsIndex(BaseIndex): TODO Use an interval tree stored in disk, possibly using a B-tree. 
""" + def __init__(self, *, scope_type: ScopeType): + self._scope_type = scope_type + + def get_scope(self) -> Scope: + return self._scope_type.value + @abstractmethod def add_tx(self, tx: BaseTransaction) -> bool: """ Add a new transaction to the index diff --git a/hathor/indexes/tokens_index.py b/hathor/indexes/tokens_index.py index 27c38fa1d..9528ee32b 100644 --- a/hathor/indexes/tokens_index.py +++ b/hathor/indexes/tokens_index.py @@ -16,8 +16,15 @@ from typing import Iterator, List, NamedTuple, Optional, Tuple from hathor.indexes.base_index import BaseIndex +from hathor.indexes.scope import Scope from hathor.transaction import BaseTransaction +SCOPE = Scope( + include_blocks=False, + include_txs=True, + include_voided=True, +) + class TokenUtxoInfo(NamedTuple): tx_hash: bytes @@ -62,6 +69,9 @@ class TokensIndex(BaseIndex): """ Index of tokens by token uid """ + def get_scope(self) -> Scope: + return SCOPE + def init_loop_step(self, tx: BaseTransaction) -> None: tx_meta = tx.get_metadata() if tx_meta.voided_by: diff --git a/hathor/indexes/tx_group_index.py b/hathor/indexes/tx_group_index.py new file mode 100644 index 000000000..4041917f5 --- /dev/null +++ b/hathor/indexes/tx_group_index.py @@ -0,0 +1,59 @@ +# Copyright 2021 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from abc import abstractmethod +from typing import Generic, Iterable, Sized, TypeVar + +from structlog import get_logger + +from hathor.indexes.base_index import BaseIndex +from hathor.transaction import BaseTransaction + +logger = get_logger() + +KT = TypeVar('KT', bound=Sized) + + +class TxGroupIndex(BaseIndex, Generic[KT]): + """This is an abstract index to easily group transactions by key. Each transaction might belong to + more than one group. For example, when grouped by addresses, one transaction with five different + addresses would be added to five groups. + + Implementations using this index must extract a list of keys from each transaction. + """ + + @abstractmethod + def add_tx(self, tx: BaseTransaction) -> None: + """Add tx to this index.""" + raise NotImplementedError + + @abstractmethod + def remove_tx(self, tx: BaseTransaction) -> None: + """Remove tx from this index.""" + raise NotImplementedError + + @abstractmethod + def _get_from_key(self, key: KT) -> Iterable[bytes]: + """Get all transactions that have a given key.""" + raise NotImplementedError + + @abstractmethod + def _get_sorted_from_key(self, key: KT) -> Iterable[bytes]: + """Get all transactions that have a given key, sorted by timestamp.""" + raise NotImplementedError + + @abstractmethod + def _is_key_empty(self, key: KT) -> bool: + """Check whether a key is empty.""" + raise NotImplementedError diff --git a/hathor/indexes/utxo_index.py b/hathor/indexes/utxo_index.py index 3c788bca9..8606b2ba5 100644 --- a/hathor/indexes/utxo_index.py +++ b/hathor/indexes/utxo_index.py @@ -20,6 +20,7 @@ from hathor.conf import HathorSettings from hathor.indexes.base_index import BaseIndex +from hathor.indexes.scope import Scope from hathor.transaction import BaseTransaction, TxOutput from hathor.transaction.scripts import parse_address_script from hathor.util import sorted_merger @@ -27,6 +28,12 @@ logger = get_logger() settings = HathorSettings() +SCOPE = Scope( + include_blocks=True, + 
include_txs=True, + include_voided=True, +) + @dataclass(frozen=True) class UtxoIndexItem: @@ -104,6 +111,9 @@ def __init__(self): # interface methods provided by the base class + def get_scope(self) -> Scope: + return SCOPE + def init_loop_step(self, tx: BaseTransaction) -> None: self.update(tx) diff --git a/hathor/manager.py b/hathor/manager.py index 4df2fcb97..642c0d775 100644 --- a/hathor/manager.py +++ b/hathor/manager.py @@ -16,7 +16,7 @@ import sys import time from enum import Enum -from typing import Any, Iterable, Iterator, List, NamedTuple, Optional, Set, Tuple, Union +from typing import Any, Iterable, Iterator, List, NamedTuple, Optional, Tuple, Union from hathorlib.base_transaction import tx_or_block_from_bytes as lib_tx_or_block_from_bytes from structlog import get_logger @@ -81,37 +81,40 @@ class UnhealthinessReason(str, Enum): # This is the interval to be used by the task to check if the node is synced CHECK_SYNC_STATE_INTERVAL = 30 # seconds - def __init__(self, reactor: Reactor, peer_id: Optional[PeerId] = None, network: Optional[str] = None, - hostname: Optional[str] = None, pubsub: Optional[PubSubManager] = None, - wallet: Optional[BaseWallet] = None, tx_storage: Optional[TransactionStorage] = None, - event_storage: Optional[EventStorage] = None, - peer_storage: Optional[Any] = None, wallet_index: bool = False, utxo_index: bool = False, - stratum_port: Optional[int] = None, ssl: bool = True, - enable_sync_v1: bool = True, enable_sync_v2: bool = False, - capabilities: Optional[List[str]] = None, checkpoints: Optional[List[Checkpoint]] = None, - rng: Optional[Random] = None, soft_voided_tx_ids: Optional[Set[bytes]] = None, - environment_info: Optional[EnvironmentInfo] = None) -> None: + def __init__(self, + reactor: Reactor, + *, + pubsub: PubSubManager, + consensus_algorithm: ConsensusAlgorithm, + peer_id: PeerId, + tx_storage: TransactionStorage, + event_storage: EventStorage, + network: str, + hostname: Optional[str] = None, + wallet: 
Optional[BaseWallet] = None, + event_manager: Optional[EventManager] = None, + stratum_port: Optional[int] = None, + ssl: bool = True, + enable_sync_v1: bool = False, + enable_sync_v1_1: bool = True, + enable_sync_v2: bool = False, + capabilities: Optional[List[str]] = None, + checkpoints: Optional[List[Checkpoint]] = None, + rng: Optional[Random] = None, + environment_info: Optional[EnvironmentInfo] = None, + full_verification: bool = False): """ :param reactor: Twisted reactor which handles the mainloop and the events. - :param peer_id: Id of this node. If not given, a new one is created. + :param peer_id: Id of this node. :param network: Name of the network this node participates. Usually it is either testnet or mainnet. :type network: string :param hostname: The hostname of this node. It is used to generate its entrypoints. :type hostname: string - :param pubsub: If not given, a new one is created. - :type pubsub: :py:class:`hathor.pubsub.PubSubManager` - :param tx_storage: Required storage backend. :type tx_storage: :py:class:`hathor.transaction.storage.transaction_storage.TransactionStorage` - :param peer_storage: If not given, a new one is created. - :type peer_storage: :py:class:`hathor.p2p.peer_storage.PeerStorage` - - :param wallet_index: If should add a wallet index in the storage - :type wallet_index: bool - :param stratum_port: Stratum server port. Stratum server will only be created if it is not None. 
:type stratum_port: Optional[int] """ @@ -119,15 +122,14 @@ def __init__(self, reactor: Reactor, peer_id: Optional[PeerId] = None, network: from hathor.p2p.factory import HathorClientFactory, HathorServerFactory from hathor.p2p.manager import ConnectionsManager - if not (enable_sync_v1 or enable_sync_v2): + if not (enable_sync_v1 or enable_sync_v1_1 or enable_sync_v2): raise TypeError(f'{type(self).__name__}() at least one sync version is required') - if tx_storage is None: - raise TypeError(f'{type(self).__name__}() missing 1 required positional argument: \'tx_storage\'') - self._enable_sync_v1 = enable_sync_v1 self._enable_sync_v2 = enable_sync_v2 + self._cmd_path: Optional[str] = None + self.log = logger.new() if rng is None: @@ -148,8 +150,8 @@ def __init__(self, reactor: Reactor, peer_id: Optional[PeerId] = None, network: # Remote address, which can be different from local address. self.remote_address = None - self.my_peer = peer_id or PeerId() - self.network = network or 'testnet' + self.my_peer = peer_id + self.network = network self.is_started: bool = False @@ -165,30 +167,22 @@ def __init__(self, reactor: Reactor, peer_id: Optional[PeerId] = None, network: self.checkpoints_ready[0] = True # XXX Should we use a singleton or a new PeerStorage? 
[msbrogli 2018-08-29] - self.pubsub = pubsub or PubSubManager(self.reactor) + self.pubsub = pubsub self.tx_storage = tx_storage self.tx_storage.pubsub = self.pubsub - if wallet_index and self.tx_storage.with_index: - assert self.tx_storage.indexes is not None - self.log.debug('enable wallet indexes') - self.tx_storage.indexes.enable_address_index(self.pubsub) - self.tx_storage.indexes.enable_tokens_index() - if utxo_index and self.tx_storage.with_index: - assert self.tx_storage.indexes is not None - self.log.debug('enable utxo index') - self.tx_storage.indexes.enable_utxo_index() - self.event_manager: Optional[EventManager] = None - if event_storage is not None: - self.event_manager = EventManager(event_storage, self.reactor, not_none(self.my_peer.id)) - self.event_manager.subscribe(self.pubsub) + + self._event_manager = event_manager + + if self._event_manager: + assert self._event_manager.event_storage == event_storage + if enable_sync_v2: assert self.tx_storage.indexes is not None self.log.debug('enable sync-v2 indexes') self.tx_storage.indexes.enable_deps_index() self.tx_storage.indexes.enable_mempool_index() - self.soft_voided_tx_ids = soft_voided_tx_ids or set() - self.consensus_algorithm = ConsensusAlgorithm(self.soft_voided_tx_ids, pubsub=self.pubsub) + self.consensus_algorithm = consensus_algorithm self.peer_discoveries: List[PeerDiscovery] = [] @@ -197,7 +191,8 @@ def __init__(self, reactor: Reactor, peer_id: Optional[PeerId] = None, network: self.client_factory = HathorClientFactory(self.network, self.my_peer, node=self, use_ssl=ssl) self.connections = ConnectionsManager(self.reactor, self.my_peer, self.server_factory, self.client_factory, self.pubsub, self, ssl, whitelist_only=False, rng=self.rng, - enable_sync_v1=enable_sync_v1, enable_sync_v2=enable_sync_v2) + enable_sync_v1=enable_sync_v1, enable_sync_v2=enable_sync_v2, + enable_sync_v1_1=enable_sync_v1_1) self.metrics = Metrics( pubsub=self.pubsub, @@ -231,16 +226,12 @@ def __init__(self, reactor: 
Reactor, peer_id: Optional[PeerId] = None, network: # Full verification execute all validations for transactions and blocks when initializing the node # Can be activated on the command line with --full-verification - self._full_verification = False + self._full_verification = full_verification # Activated with --x-enable-event-queue flag # It activates the event mechanism inside full node self.enable_event_queue = False - # Activated with --x-retain-events flag. It will be ignored if --enable-event-queue is not provided - # It tells full node to retain all generated events. Otherwise, they will be deleted after retrieval - self.retain_events = False - # List of whitelisted peers self.peers_whitelist: List[str] = [] @@ -294,6 +285,9 @@ def start(self) -> None: ) sys.exit(-1) + if self._event_manager: + self._event_manager.start(not_none(self.my_peer.id)) + self.state = self.NodeState.INITIALIZING self.pubsub.publish(HathorEvents.MANAGER_ON_START) self.connections.start() @@ -361,6 +355,9 @@ def stop(self) -> Deferred: if wait_stratum: waits.append(wait_stratum) + if self._event_manager: + self._event_manager.stop() + self.tx_storage.flush() return defer.DeferredList(waits) @@ -399,6 +396,8 @@ def _initialize_components(self) -> None: This method runs through all transactions, verifying them and updating our wallet. """ + assert not self._event_manager, 'this method cannot be used if the events feature is enabled.' 
+ self.log.info('initialize') if self.wallet: self.wallet._manually_initialize() @@ -419,7 +418,7 @@ def _initialize_components(self) -> None: # a database that already has the soft voided transaction before marking them in the metadata # Any new sync from the beginning should work fine or starting with the latest snapshot # that already has the soft voided transactions marked - for soft_voided_id in settings.SOFT_VOIDED_TX_IDS: + for soft_voided_id in self.consensus_algorithm.soft_voided_tx_ids: try: soft_voided_tx = self.tx_storage.get_transaction(soft_voided_id) except TransactionDoesNotExist: @@ -573,6 +572,7 @@ def _initialize_components(self) -> None: # self.stop_profiler(save_to='profiles/initializing.prof') self.state = self.NodeState.READY + total_load_time = LogDuration(t2 - t0) tx_rate = '?' if total_load_time == 0 else cnt / total_load_time @@ -611,7 +611,7 @@ def _initialize_components_new(self) -> None: # a database that already has the soft voided transaction before marking them in the metadata # Any new sync from the beginning should work fine or starting with the latest snapshot # that already has the soft voided transactions marked - for soft_voided_id in settings.SOFT_VOIDED_TX_IDS: + for soft_voided_id in self.consensus_algorithm.soft_voided_tx_ids: try: soft_voided_tx = self.tx_storage.get_transaction(soft_voided_id) except TransactionDoesNotExist: @@ -650,6 +650,7 @@ def _initialize_components_new(self) -> None: # XXX: last step before actually starting is updating the last started at timestamps self.tx_storage.update_last_started_at(started_at) self.state = self.NodeState.READY + self.pubsub.publish(HathorEvents.LOAD_FINISHED) t1 = time.time() total_load_time = LogDuration(t1 - t0) @@ -1226,6 +1227,14 @@ def check_sync_state(self): self.lc_check_sync_state.stop() + def set_cmd_path(self, path: str) -> None: + """Set the cmd path, where sysadmins can place files to communicate with the full node.""" + self._cmd_path = path + + def 
get_cmd_path(self) -> Optional[str]: + """Return the cmd path. If no cmd path is set, returns None.""" + return self._cmd_path + class ParentTxs(NamedTuple): """ Tuple where the `must_include` hash, when present (at most 1), must be included in a pair, and a list of hashes diff --git a/hathor/merged_mining/coordinator.py b/hathor/merged_mining/coordinator.py index 40c361e97..ec748857e 100644 --- a/hathor/merged_mining/coordinator.py +++ b/hathor/merged_mining/coordinator.py @@ -755,7 +755,7 @@ def job_request(self) -> None: merkle_root=bitcoin_block.merkle_root.hex()) async def estimator_loop(self) -> None: - """ This loop only cares about reducing the current difficulty if the miner takes too long to submit a solution. + """ This loop only cares about reducing the current difficulty if the miner takes too long to submit a solution """ from functools import reduce from math import log2 diff --git a/hathor/metrics.py b/hathor/metrics.py index d9c4cdb6d..cf56836ed 100644 --- a/hathor/metrics.py +++ b/hathor/metrics.py @@ -119,7 +119,7 @@ class Metrics: # Peers known known_peers: int = 0 - def __post_init__(self): + def __post_init__(self) -> None: self.log = logger.new() # Stores caculated tx weights saved in tx storage diff --git a/hathor/p2p/manager.py b/hathor/p2p/manager.py index 6d2061532..5ef2a7174 100644 --- a/hathor/p2p/manager.py +++ b/hathor/p2p/manager.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import TYPE_CHECKING, Any, Dict, Iterable, NamedTuple, Optional, Set, Union +from typing import TYPE_CHECKING, Any, Dict, Iterable, List, NamedTuple, Optional, Set, Union from structlog import get_logger from twisted.internet import endpoints @@ -27,6 +27,7 @@ from hathor.p2p.peer_id import PeerId from hathor.p2p.peer_storage import PeerStorage from hathor.p2p.protocol import HathorProtocol +from hathor.p2p.rate_limiter import RateLimiter from hathor.p2p.states.ready import ReadyState from hathor.p2p.sync_factory import SyncManagerFactory from hathor.p2p.sync_version import SyncVersion @@ -48,6 +49,14 @@ WHITELIST_REQUEST_TIMEOUT = 45 +class _SyncRotateInfo(NamedTuple): + candidates: List[str] + old: Set[str] + new: Set[str] + to_disable: Set[str] + to_enable: Set[str] + + class _ConnectingPeer(NamedTuple): connection_string: str endpoint_deferred: Deferred @@ -63,6 +72,11 @@ class PeerConnectionsMetrics(NamedTuple): class ConnectionsManager: """ It manages all peer-to-peer connections and events related to control messages. 
""" + MAX_ENABLED_SYNC = settings.MAX_ENABLED_SYNC + SYNC_UPDATE_INTERVAL = settings.SYNC_UPDATE_INTERVAL + + class GlobalRateLimiter: + SEND_TIPS = 'NodeSyncTimestamp.send_tips' connections: Set[HathorProtocol] connected_peers: Dict[str, HathorProtocol] @@ -71,12 +85,16 @@ class ConnectionsManager: whitelist_only: bool _sync_factories: Dict[SyncVersion, SyncManagerFactory] + rate_limiter: RateLimiter + def __init__(self, reactor: Reactor, my_peer: PeerId, server_factory: 'HathorServerFactory', client_factory: 'HathorClientFactory', pubsub: PubSubManager, manager: 'HathorManager', - ssl: bool, rng: Random, whitelist_only: bool, enable_sync_v1: bool, enable_sync_v2: bool) -> None: + ssl: bool, rng: Random, whitelist_only: bool, enable_sync_v1: bool, enable_sync_v2: bool, + enable_sync_v1_1: bool) -> None: + from hathor.p2p.sync_v1_1_factory import SyncV11Factory from hathor.p2p.sync_v1_factory import SyncV1Factory - if not (enable_sync_v1 or enable_sync_v2): + if not (enable_sync_v1 or enable_sync_v1_1 or enable_sync_v2): raise TypeError(f'{type(self).__name__}() at least one sync version is required') self.log = logger.new() @@ -98,6 +116,10 @@ def __init__(self, reactor: Reactor, my_peer: PeerId, server_factory: 'HathorSer self.max_connections: int = settings.PEER_MAX_CONNECTIONS + # Global rate limiter for all connections. + self.rate_limiter = RateLimiter(self.reactor) + self.enable_rate_limiter() + # All connections. self.connections = set() @@ -121,6 +143,17 @@ def __init__(self, reactor: Reactor, my_peer: PeerId, server_factory: 'HathorSer self.lc_reconnect = LoopingCall(self.reconnect_to_all) self.lc_reconnect.clock = self.reactor + # A timer to update sync of all peers. + self.lc_sync_update = LoopingCall(self.sync_update) + self.lc_sync_update.clock = self.reactor + self.lc_sync_update_interval: float = 5 # seconds + + # Peers that always have sync enabled. + self.always_enable_sync: Set[str] = set() + + # Timestamp of the last time sync was updated. 
+ self._last_sync_rotate: float = 0. + # A timer to try to reconnect to the disconnect known peers. if settings.ENABLE_PEER_WHITELIST: self.wl_reconnect = LoopingCall(self.update_whitelist) @@ -138,11 +171,26 @@ def __init__(self, reactor: Reactor, my_peer: PeerId, server_factory: 'HathorSer self._sync_factories = {} if enable_sync_v1: self._sync_factories[SyncVersion.V1] = SyncV1Factory(self) + if enable_sync_v1_1: + self._sync_factories[SyncVersion.V1_1] = SyncV11Factory(self) if enable_sync_v2: self._sync_factories[SyncVersion.V2] = SyncV1Factory(self) + def disable_rate_limiter(self) -> None: + """Disable global rate limiter.""" + self.rate_limiter.unset_limit(self.GlobalRateLimiter.SEND_TIPS) + + def enable_rate_limiter(self, max_hits: int = 16, window_seconds: float = 1) -> None: + """Enable global rate limiter. This method can be called to change the current rate limit.""" + self.rate_limiter.set_limit( + self.GlobalRateLimiter.SEND_TIPS, + max_hits, + window_seconds + ) + def start(self) -> None: self.lc_reconnect.start(5, now=False) + self.lc_sync_update.start(self.lc_sync_update_interval, now=False) if settings.ENABLE_PEER_WHITELIST: self._start_whitelist_reconnect() @@ -165,6 +213,9 @@ def stop(self) -> None: if self.lc_reconnect.running: self.lc_reconnect.stop() + if self.lc_sync_update.running: + self.lc_sync_update.stop() + def _get_peers_count(self) -> PeerConnectionsMetrics: """Get a dict containing the count of peers in each state""" @@ -280,6 +331,12 @@ def on_peer_ready(self, protocol: HathorProtocol) -> None: # In case it was a retry, we must reset the data only here, after it gets ready protocol.peer.reset_retry_timestamp() + if len(self.connected_peers) <= self.MAX_ENABLED_SYNC: + protocol.enable_sync() + + if protocol.peer.id in self.always_enable_sync: + protocol.enable_sync() + # Notify other peers about this new peer connection. 
for conn in self.iter_ready_connections(): if conn != protocol: @@ -560,3 +617,85 @@ def drop_connection_by_peer_id(self, peer_id: str) -> None: protocol = self.connected_peers.get(peer_id) if protocol: self.drop_connection(protocol) + + def sync_update(self) -> None: + """Update the subset of connections that running the sync algorithm.""" + try: + self._sync_rotate_if_needed() + except Exception: + self.log.error('_sync_rotate_if_needed failed', exc_info=True) + + def set_always_enable_sync(self, values: List[str]) -> None: + """Set a new list of peers to always enable sync. This operation completely replaces the previous list.""" + new: Set[str] = set(values) + + old = self.always_enable_sync + if new == old: + return + + to_enable = new - old + to_disable = old - new + + self.log.info('update always_enable_sync', new=new, to_enable=to_enable, to_disable=to_disable) + + for peer_id in new: + if peer_id not in self.connected_peers: + continue + self.connected_peers[peer_id].enable_sync() + + for peer_id in to_disable: + if peer_id not in self.connected_peers: + continue + self.connected_peers[peer_id].disable_sync() + + self.always_enable_sync = new + + def _calculate_sync_rotate(self) -> _SyncRotateInfo: + """Calculate new sync rotation.""" + current_enabled: Set[str] = set() + for peer_id, conn in self.connected_peers.items(): + if conn.is_sync_enabled(): + current_enabled.add(peer_id) + + candidates = list(self.connected_peers.keys()) + self.rng.shuffle(candidates) + selected_peers: Set[str] = set(candidates[:self.MAX_ENABLED_SYNC]) + + to_disable = current_enabled - selected_peers + to_enable = selected_peers - current_enabled + + # Do not disable peers in the `always_enable_sync`. 
+ to_disable.difference_update(self.always_enable_sync) + + return _SyncRotateInfo( + candidates=candidates, + old=current_enabled, + new=selected_peers, + to_disable=to_disable, + to_enable=to_enable, + ) + + def _sync_rotate_if_needed(self, *, force: bool = False) -> None: + """Rotate peers who we are syncing from.""" + now = self.reactor.seconds() + dt = now - self._last_sync_rotate + if not force and dt < self.SYNC_UPDATE_INTERVAL: + return + self._last_sync_rotate = now + + info = self._calculate_sync_rotate() + + self.log.info( + 'sync rotate', + candidates=len(info.candidates), + old=info.old, + new=info.new, + to_enable=info.to_enable, + to_disable=info.to_disable, + ) + + for peer_id in info.to_disable: + self.connected_peers[peer_id].disable_sync() + + for peer_id in info.to_enable: + self.connected_peers[peer_id].enable_sync() diff --git a/hathor/p2p/node_sync.py b/hathor/p2p/node_sync.py index 3a035cd2a..da2726c3f 100644 --- a/hathor/p2p/node_sync.py +++ b/hathor/p2p/node_sync.py @@ -40,6 +40,7 @@ from twisted.python.failure import Failure # noqa: F401 from hathor.p2p.protocol import HathorProtocol # noqa: F401 + from hathor.p2p.rate_limiter import RateLimiter def _get_deps(tx: BaseTransaction) -> Iterator[bytes]: @@ -198,6 +199,11 @@ def __init__(self, protocol: 'HathorProtocol', downloader: Downloader, reactor: reactor = twisted_reactor self.reactor: Reactor = reactor + # Rate limit for this connection. + assert protocol.connections is not None + self.global_rate_limiter: 'RateLimiter' = protocol.connections.rate_limiter + self.GlobalRateLimiter = protocol.connections.GlobalRateLimiter + self.call_later_id: Optional[IDelayedCall] = None self.call_later_interval: int = 1 # seconds @@ -223,6 +229,10 @@ def __init__(self, protocol: 'HathorProtocol', downloader: Downloader, reactor: # Indicate whether the sync manager has been started. self._started: bool = False + # Indicate whether the synchronization is enabled. 
+ # When the sync is disabled, it will keep the last synced_timestamp. + self.is_enabled: bool = False + # Indicate whether the synchronization is running. self.is_running: bool = False @@ -233,6 +243,7 @@ def get_status(self): """ Return the status of the sync. """ return { + 'is_enabled': self.is_enabled, 'latest_timestamp': self.peer_timestamp, 'synced_timestamp': self.synced_timestamp, } @@ -296,6 +307,8 @@ def is_errored(self) -> bool: return False def send_tx_to_peer_if_possible(self, tx: BaseTransaction) -> None: + if not self.is_enabled: + return if self.peer_timestamp is None: return if self.synced_timestamp is None: @@ -431,7 +444,9 @@ def find_synced_timestamp(self) -> Generator[Deferred, Any, Optional[int]]: step = 1 while tips.merkle_tree != local_merkle_tree: if cur <= self.manager.tx_storage.first_timestamp: - raise Exception('We cannot go before genesis. Is it an attacker?!') + raise Exception( + 'We cannot go before genesis. Peer is probably running with wrong configuration or database.' + ) prev_cur = cur assert self.manager.tx_storage.first_timestamp > 0 cur = max(cur - step, self.manager.tx_storage.first_timestamp) @@ -474,6 +489,9 @@ def find_synced_timestamp(self) -> Generator[Deferred, Any, Optional[int]]: def _next_step(self) -> Generator[Deferred, Any, None]: """ Run the next step to keep nodes synced. 
""" + if not self.is_enabled: + self.log.debug('sync is disabled') + return if not self.is_running or not self._started: self.log.debug('already stopped') return @@ -492,6 +510,11 @@ def next_step(self) -> Generator[Deferred, Any, None]: self.log.debug('already running') return + if not self.is_enabled: + self.log.debug('sync is disabled') + self.schedule_next_step_call() + return + try: self.is_running = True yield self._next_step() @@ -499,12 +522,16 @@ def next_step(self) -> Generator[Deferred, Any, None]: self.log.warn('_next_step error', exc_info=True) raise else: - if self.call_later_id and self.call_later_id.active(): - self.call_later_id.cancel() - self.call_later_id = self.reactor.callLater(self.call_later_interval, self.next_step) + self.schedule_next_step_call() finally: self.is_running = False + def schedule_next_step_call(self) -> None: + """Schedule `next_step()` call.""" + if self.call_later_id and self.call_later_id.active(): + self.call_later_id.cancel() + self.call_later_id = self.reactor.callLater(self.call_later_interval, self.next_step) + def send_message(self, cmd: ProtocolMessages, payload: Optional[str] = None) -> None: """ Helper to send a message. """ @@ -587,6 +614,14 @@ def handle_get_tips(self, payload: str) -> None: self.send_tips(args.timestamp, args.include_hashes, args.offset) def send_tips(self, timestamp: Optional[int] = None, include_hashes: bool = False, offset: int = 0) -> None: + """Try to send a TIPS message. If rate limit has been reached, it schedules to send it later.""" + if not self.global_rate_limiter.add_hit(self.GlobalRateLimiter.SEND_TIPS): + self.log.debug('send_tips throttled') + self.reactor.callLater(1, self.send_tips, timestamp, include_hashes, offset) + return + self._send_tips(timestamp, include_hashes, offset) + + def _send_tips(self, timestamp: Optional[int] = None, include_hashes: bool = False, offset: int = 0) -> None: """ Send a TIPS message. 
""" if timestamp is None: @@ -765,3 +800,15 @@ def on_get_data_failed(self, reason: 'Failure', hash_bytes: bytes) -> None: We should just log a warning because it will continue the sync and will try to get this tx again. """ self.log.warn('failed to download tx', tx=hash_bytes.hex(), reason=reason) + + def is_sync_enabled(self) -> bool: + """Return True if sync is enabled for this connection.""" + return self.is_enabled + + def enable_sync(self) -> None: + """Enable sync for this connection.""" + self.is_enabled = True + + def disable_sync(self) -> None: + """Disable sync for this connection.""" + self.is_enabled = False diff --git a/hathor/p2p/protocol.py b/hathor/p2p/protocol.py index f4020a0ef..a4e09c244 100644 --- a/hathor/p2p/protocol.py +++ b/hathor/p2p/protocol.py @@ -134,7 +134,7 @@ def __init__(self, network: str, my_peer: PeerId, connections: Optional['Connect self.state: Optional[BaseState] = None # Default rate limit - self.ratelimit: RateLimiter = RateLimiter() + self.ratelimit: RateLimiter = RateLimiter(self.reactor) # self.ratelimit.set_limit(self.RateLimitKeys.GLOBAL, 120, 60) # Connection string of the peer @@ -296,7 +296,12 @@ def recv_message(self, cmd: ProtocolMessages, payload: str) -> Optional[Deferred self.reset_idle_timeout() if not self.ratelimit.add_hit(self.RateLimitKeys.GLOBAL): - self.state.send_throttle(self.RateLimitKeys.GLOBAL) + # XXX: on Python 3.11 the result of the following expression: + # '{}'.format(HathorProtocol.RateLimitKeys.GLOBAL) + # is not 'global' but 'RateLimitKeys.GLOBAL', even though the enum value *is* a string, but it seems + # that something like `str(value)` is called which results in a different value (usually not the case + # for regular strings, but it is for enum+str), using `enum_variant.value` side-steps this problem + self.state.send_throttle(self.RateLimitKeys.GLOBAL.value) return None fn = self.state.cmd_map.get(cmd) @@ -355,6 +360,27 @@ def handle_error(self, payload: str) -> None: """ 
self.log.warn('remote error', payload=payload) + def is_sync_enabled(self) -> bool: + """Return true if sync is enabled for this connection.""" + if not self.is_state(self.PeerState.READY): + return False + assert isinstance(self.state, ReadyState) + return self.state.sync_manager.is_sync_enabled() + + def enable_sync(self) -> None: + """Enable sync for this connection.""" + assert self.is_state(self.PeerState.READY) + assert isinstance(self.state, ReadyState) + self.log.info('enable sync') + self.state.sync_manager.enable_sync() + + def disable_sync(self) -> None: + """Disable sync for this connection.""" + assert self.is_state(self.PeerState.READY) + assert isinstance(self.state, ReadyState) + self.log.info('disable sync') + self.state.sync_manager.disable_sync() + class HathorLineReceiver(LineReceiver, HathorProtocol): """ Implements HathorProtocol in a LineReceiver protocol. diff --git a/hathor/p2p/rate_limiter.py b/hathor/p2p/rate_limiter.py index 4535f9a81..3d87fb657 100644 --- a/hathor/p2p/rate_limiter.py +++ b/hathor/p2p/rate_limiter.py @@ -40,7 +40,7 @@ def __init__(self, reactor: Optional[Reactor] = None): reactor = twisted_reactor self.reactor = reactor - def set_limit(self, key: str, max_hits: int, window_seconds: int) -> None: + def set_limit(self, key: str, max_hits: int, window_seconds: float) -> None: """ Set a limit to a given key, e.g., `max_hits = 10` and `window_seconds = 60` means at most 10 hits per minute. 
diff --git a/hathor/p2p/resources/status.py b/hathor/p2p/resources/status.py index bcd72826f..fdefb58a5 100644 --- a/hathor/p2p/resources/status.py +++ b/hathor/p2p/resources/status.py @@ -81,6 +81,15 @@ def render_GET(self, request): }) app = 'Hathor v{}'.format(hathor.__version__) + + best_block_tips = [] + for tip in self.manager.tx_storage.get_best_block_tips(): + tx = self.manager.tx_storage.get_transaction(tip) + meta = tx.get_metadata() + best_block_tips.append({'hash': tx.hash_hex, 'height': meta.height}) + + best_block = self.manager.tx_storage.get_best_block() + data = { 'server': { 'id': self.manager.connections.my_peer.id, @@ -100,6 +109,11 @@ def render_GET(self, request): 'dag': { 'first_timestamp': self.manager.tx_storage.first_timestamp, 'latest_timestamp': self.manager.tx_storage.latest_timestamp, + 'best_block_tips': best_block_tips, + 'best_block': { + 'hash': best_block.hash_hex, + 'height': best_block.get_metadata().height, + }, } } return json_dumpb(data) @@ -170,7 +184,17 @@ def render_GET(self, request): }, 'dag': { 'first_timestamp': 1539271481, - 'latest_timestamp': 1539271483 + 'latest_timestamp': 1539271483, + 'best_block_tips': [ + { + 'hash': '000007eb968a6cdf0499e2d033faf1e163e0dc9cf41876acad4d421836972038', # noqa: E501 + 'height': 0 + } + ], + 'best_block': { + 'hash': '000007eb968a6cdf0499e2d033faf1e163e0dc9cf41876acad4d421836972038', # noqa: E501 + 'height': 0 + } } } } diff --git a/hathor/p2p/states/peer_id.py b/hathor/p2p/states/peer_id.py index 6384c9a00..73ac42768 100644 --- a/hathor/p2p/states/peer_id.py +++ b/hathor/p2p/states/peer_id.py @@ -21,7 +21,6 @@ from hathor.p2p.messages import ProtocolMessages from hathor.p2p.peer_id import PeerId from hathor.p2p.states.base import BaseState -from hathor.p2p.sync_version import SyncVersion from hathor.util import json_dumps, json_loads if TYPE_CHECKING: @@ -156,8 +155,8 @@ def _should_block_peer(self, peer_id: str) -> bool: # when ENABLE_PEER_WHITELIST is set, we check if we're on 
sync-v1 to block non-whitelisted peers if settings.ENABLE_PEER_WHITELIST: - protocol_is_v1 = self.protocol.sync_version is SyncVersion.V1 - if protocol_is_v1 and not peer_is_whitelisted: + assert self.protocol.sync_version is not None + if self.protocol.sync_version.is_v1() and not peer_is_whitelisted: return True # otherwise we block non-whitelisted peers when on "whitelist-only mode" diff --git a/hathor/p2p/sync_manager.py b/hathor/p2p/sync_manager.py index 076044fad..25672bed6 100644 --- a/hathor/p2p/sync_manager.py +++ b/hathor/p2p/sync_manager.py @@ -54,3 +54,15 @@ def is_synced(self) -> bool: def is_errored(self) -> bool: """Whether the manager entered an error state""" raise NotImplementedError + + def is_sync_enabled(self) -> bool: + """Return true if the sync is enabled.""" + raise NotImplementedError + + def enable_sync(self) -> None: + """Enable sync.""" + raise NotImplementedError + + def disable_sync(self) -> None: + """Disable sync.""" + raise NotImplementedError diff --git a/hathor/p2p/sync_v1_1_factory.py b/hathor/p2p/sync_v1_1_factory.py new file mode 100644 index 000000000..99af53ff6 --- /dev/null +++ b/hathor/p2p/sync_v1_1_factory.py @@ -0,0 +1,33 @@ +# Copyright 2021 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from typing import TYPE_CHECKING, Optional + +from hathor.p2p.downloader import Downloader +from hathor.p2p.manager import ConnectionsManager +from hathor.p2p.node_sync import NodeSyncTimestamp +from hathor.p2p.sync_factory import SyncManagerFactory +from hathor.p2p.sync_manager import SyncManager +from hathor.util import Reactor + +if TYPE_CHECKING: + from hathor.p2p.protocol import HathorProtocol + + +class SyncV11Factory(SyncManagerFactory): + def __init__(self, connections: ConnectionsManager): + self.downloader = Downloader(connections.manager) + + def create_sync_manager(self, protocol: 'HathorProtocol', reactor: Optional[Reactor] = None) -> SyncManager: + return NodeSyncTimestamp(protocol, downloader=self.downloader, reactor=reactor) diff --git a/hathor/p2p/sync_version.py b/hathor/p2p/sync_version.py index 906ef8611..8c9ab7ee6 100644 --- a/hathor/p2p/sync_version.py +++ b/hathor/p2p/sync_version.py @@ -23,6 +23,7 @@ class SyncVersion(Enum): # example, peers using `v2-fake` (which just uses sync-v1) will not connect to peers using `v2-alpha`, and so # on. 
V1 = 'v1' + V1_1 = 'v1.1' V2 = 'v2-fake' # uses sync-v1 to mock sync-v2 def __str__(self): @@ -39,11 +40,17 @@ def get_priority(self) -> int: if self is SyncVersion.V1: # low priority return 10 + elif self is SyncVersion.V1_1: + return 11 elif self is SyncVersion.V2: return 20 else: raise ValueError('value is either invalid for this enum or not implemented') + def is_v1(self) -> bool: + """Return True for V1 and V1_1.""" + return self.get_priority() < 20 + # XXX: total_ordering decorator will implement the other methods: __le__, __gt__, and __ge__ def __lt__(self, other): """Used to sort versions by considering the value on get_priority.""" diff --git a/hathor/pubsub.py b/hathor/pubsub.py index 77a69064b..255088f80 100644 --- a/hathor/pubsub.py +++ b/hathor/pubsub.py @@ -14,12 +14,15 @@ from collections import defaultdict, deque from enum import Enum -from typing import Any, Callable, Deque, Dict, List, Tuple, cast +from typing import TYPE_CHECKING, Any, Callable, Deque, Dict, List, Tuple, cast from twisted.internet.interfaces import IReactorFromThreads from hathor.util import Reactor, ReactorThread +if TYPE_CHECKING: + from hathor.transaction import BaseTransaction, Block + class HathorEvents(Enum): """ @@ -31,18 +34,6 @@ class HathorEvents(Enum): Triggered when a peer connection to the network fails Publishes the peer id and the peers count - NETWORK_NEW_TX_VOIDED - Triggered when a new transaction is voided in the network - Publishes a tx object - - NETWORK_BEST_BLOCK_FOUND - Triggered when a new block is accepted in the network - Publishes a block object - - NETWORK_ORPHAN_BLOCK_FOUND - Triggered when a new block is voided in the network - Publishes a block object - NETWORK_PEER_CONNECTED: Triggered when a new peer connects to the network Publishes the peer protocol and the peers count @@ -91,9 +82,6 @@ class HathorEvents(Enum): WALLET_ELEMENT_VOIDED: Triggered when a wallet element is marked as voided - LOAD_STARTED - Triggered when manager has started reading 
data from the local database - LOAD_FINISHED Triggered when manager finishes reading local data and it is ready to sync @@ -103,11 +91,8 @@ class HathorEvents(Enum): REORG_FINISHED Triggered when consensus algorithm ends all changes involved in a reorg - TX_METADATA_CHANGED - Triggered when consensus algorithm changes a metadata of an existing transaction - - BLOCK_METADATA_CHANGED - Triggered when consensus algorithm changes a metadata from an existing block + VERTEX_METADATA_CHANGED + Triggered when consensus algorithm changes a metadata of an existing vertex (transaction or block) """ MANAGER_ON_START = 'manager:on_start' MANAGER_ON_STOP = 'manager:on_stop' @@ -126,12 +111,6 @@ class HathorEvents(Enum): CONSENSUS_TX_REMOVED = 'consensus:tx_removed' - NETWORK_NEW_TX_VOIDED = 'network:new_tx_voided' - - NETWORK_BEST_BLOCK_FOUND = 'network:best_block_found' - - NETWORK_ORPHAN_BLOCK_FOUND = 'network:orphan_block_found' - WALLET_OUTPUT_RECEIVED = 'wallet:output_received' WALLET_INPUT_SPENT = 'wallet:output_spent' @@ -150,23 +129,24 @@ class HathorEvents(Enum): WALLET_ELEMENT_VOIDED = 'wallet:element_voided' - LOAD_STARTED = 'manager:load_started' - LOAD_FINISHED = 'manager:load_finished' REORG_STARTED = 'reorg:started' REORG_FINISHED = 'reorg:finished' - TX_METADATA_CHANGED = 'tx:metadata_changed' - - BLOCK_METADATA_CHANGED = 'block:metadata_changed' - class EventArguments: """Simple object for storing event arguments. 
""" + # XXX: add these as needed, these attributes don't always exist, but when they do these are their types + tx: 'BaseTransaction' + reorg_size: int + old_best_block: 'Block' + new_best_block: 'Block' + common_block: 'Block' + def __init__(self, **kwargs: Any) -> None: for key, value in kwargs.items(): setattr(self, key, value) diff --git a/hathor/simulator/__init__.py b/hathor/simulator/__init__.py index 2a9fdd0a1..c44956c67 100644 --- a/hathor/simulator/__init__.py +++ b/hathor/simulator/__init__.py @@ -14,13 +14,11 @@ from hathor.simulator.fake_connection import FakeConnection -from hathor.simulator.miner import MinerSimulator from hathor.simulator.simulator import Simulator from hathor.simulator.tx_generator import RandomTransactionGenerator __all__ = [ 'FakeConnection', - 'MinerSimulator', 'RandomTransactionGenerator', 'Simulator', ] diff --git a/hathor/simulator/miner.py b/hathor/simulator/miner.py deleted file mode 100644 index b7754179d..000000000 --- a/hathor/simulator/miner.py +++ /dev/null @@ -1,106 +0,0 @@ -# Copyright 2021 Hathor Labs -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from typing import TYPE_CHECKING - -from structlog import get_logger - -from hathor.conf import HathorSettings -from hathor.manager import HathorEvents -from hathor.util import Random - -if TYPE_CHECKING: - from hathor.manager import HathorManager - from hathor.pubsub import EventArguments - -settings = HathorSettings() -logger = get_logger() - - -class MinerSimulator: - """ Simulate block mining with actually solving the block. It is supposed to be used - with Simulator class. The mining part is simulated using the geometrical distribution. - """ - def __init__(self, manager: 'HathorManager', rng: Random, *, hashpower: float): - """ - :param: hashpower: Number of hashes per second - """ - self.blocks_found = 0 - self.manager = manager - self.hashpower = hashpower - self.clock = manager.reactor - self.block = None - self.delayedcall = None - self.log = logger.new() - self.rng = rng - - def start(self) -> None: - """ Start mining blocks. - """ - self.manager.pubsub.subscribe(HathorEvents.NETWORK_NEW_TX_ACCEPTED, self.on_new_tx) - self.schedule_next_block() - - def stop(self) -> None: - """ Stop mining blocks. - """ - if self.delayedcall: - self.delayedcall.cancel() - self.delayedcall = None - self.manager.pubsub.unsubscribe(HathorEvents.NETWORK_NEW_TX_ACCEPTED, self.on_new_tx) - - def on_new_tx(self, key: HathorEvents, args: 'EventArguments') -> None: - """ Called when a new tx or block is received. It updates the current mining to the - new block. - """ - tx = args.tx # type: ignore - if not tx.is_block: - return - if not self.block: - return - - tips = tx.storage.get_best_block_tips() - if self.block.parents[0] not in tips: - # Head changed - self.block = None - self.schedule_next_block() - - def schedule_next_block(self): - """ Schedule the propagation of the next block, and propagate a block if it has been found. 
- """ - if self.block: - self.block.nonce = self.rng.getrandbits(32) - self.block.update_hash() - self.blocks_found += 1 - self.log.debug('randomized step: found new block', hash=self.block.hash_hex, nonce=self.block.nonce) - self.manager.propagate_tx(self.block, fails_silently=False) - self.block = None - - if self.manager.can_start_mining(): - block = self.manager.generate_mining_block() - geometric_p = 2**(-block.weight) - trials = self.rng.geometric(geometric_p) - dt = 1.0 * trials / self.hashpower - self.block = block - self.log.debug('randomized step: start mining new block', dt=dt, parents=[h.hex() for h in block.parents], - block_timestamp=block.timestamp) - else: - dt = 60 - - if dt > settings.WEIGHT_DECAY_ACTIVATE_DISTANCE: - self.block = None - dt = settings.WEIGHT_DECAY_ACTIVATE_DISTANCE - - if self.delayedcall and self.delayedcall.active(): - self.delayedcall.cancel() - self.delayedcall = self.clock.callLater(dt, self.schedule_next_block) diff --git a/hathor/simulator/miner/__init__.py b/hathor/simulator/miner/__init__.py new file mode 100644 index 000000000..42d8e44f7 --- /dev/null +++ b/hathor/simulator/miner/__init__.py @@ -0,0 +1,21 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from hathor.simulator.miner.abstract_miner import AbstractMiner +from hathor.simulator.miner.geometric_miner import GeometricMiner + +__all__ = [ + 'AbstractMiner', + 'GeometricMiner', +] diff --git a/hathor/simulator/miner/abstract_miner.py b/hathor/simulator/miner/abstract_miner.py new file mode 100644 index 000000000..6a6105ba2 --- /dev/null +++ b/hathor/simulator/miner/abstract_miner.py @@ -0,0 +1,69 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from abc import ABC, abstractmethod +from typing import Optional + +from structlog import get_logger +from twisted.internet.interfaces import IDelayedCall + +from hathor.manager import HathorManager +from hathor.pubsub import EventArguments, HathorEvents +from hathor.util import Random + +logger = get_logger() + + +class AbstractMiner(ABC): + """Abstract class to represent miner simulators.""" + + _manager: HathorManager + _rng: Random + _delayed_call: Optional[IDelayedCall] = None + + def __init__(self, manager: HathorManager, rng: Random): + self._manager = manager + self._rng = rng + + self._clock = self._manager.reactor + + self.log = logger.new() + + def start(self) -> None: + """Start mining blocks.""" + self._manager.pubsub.subscribe(HathorEvents.NETWORK_NEW_TX_ACCEPTED, self._on_new_tx) + + self._schedule_next_block() + + def stop(self) -> None: + """Stop mining blocks.""" + if self._delayed_call: + self._delayed_call.cancel() + self._delayed_call = None + + 
self._manager.pubsub.unsubscribe(HathorEvents.NETWORK_NEW_TX_ACCEPTED, self._on_new_tx) + + @abstractmethod + def _on_new_tx(self, key: HathorEvents, args: EventArguments) -> None: + """Called when a new tx or block is received.""" + raise NotImplementedError + + @abstractmethod + def _schedule_next_block(self): + """Schedule the propagation of the next block, and propagate a block if it has been found.""" + raise NotImplementedError + + @abstractmethod + def get_blocks_found(self) -> int: + raise NotImplementedError diff --git a/hathor/simulator/miner/geometric_miner.py b/hathor/simulator/miner/geometric_miner.py new file mode 100644 index 000000000..68e8e8c85 --- /dev/null +++ b/hathor/simulator/miner/geometric_miner.py @@ -0,0 +1,90 @@ +# Copyright 2021 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import TYPE_CHECKING, Optional + +from hathor.conf import HathorSettings +from hathor.manager import HathorEvents +from hathor.simulator.miner.abstract_miner import AbstractMiner +from hathor.util import Random + +if TYPE_CHECKING: + from hathor.manager import HathorManager + from hathor.pubsub import EventArguments + from hathor.transaction import Block + +settings = HathorSettings() + + +class GeometricMiner(AbstractMiner): + """ Simulate block mining with actually solving the block. It is supposed to be used + with Simulator class. The mining part is simulated using the geometrical distribution. 
+ """ + def __init__(self, manager: 'HathorManager', rng: Random, *, hashpower: float): + """ + :param: hashpower: Number of hashes per second + """ + super().__init__(manager, rng) + + self._hashpower = hashpower + self._block: Optional[Block] = None + self._blocks_found: int = 0 + + def _on_new_tx(self, key: HathorEvents, args: 'EventArguments') -> None: + """ Called when a new tx or block is received. It updates the current mining to the + new block. + """ + tx = args.tx + if not tx.is_block: + return + if not self._block: + return + + assert tx.storage is not None + tips = tx.storage.get_best_block_tips() + if self._block.parents[0] not in tips: + # Head changed + self._block = None + self._schedule_next_block() + + def _schedule_next_block(self): + if self._block: + self._block.nonce = self._rng.getrandbits(32) + self._block.update_hash() + self.log.debug('randomized step: found new block', hash=self._block.hash_hex, nonce=self._block.nonce) + self._manager.propagate_tx(self._block, fails_silently=False) + self._blocks_found += 1 + self._block = None + + if self._manager.can_start_mining(): + block = self._manager.generate_mining_block() + geometric_p = 2**(-block.weight) + trials = self._rng.geometric(geometric_p) + dt = 1.0 * trials / self._hashpower + self._block = block + self.log.debug('randomized step: start mining new block', dt=dt, parents=[h.hex() for h in block.parents], + block_timestamp=block.timestamp) + else: + dt = 60 + + if dt > settings.WEIGHT_DECAY_ACTIVATE_DISTANCE: + self._block = None + dt = settings.WEIGHT_DECAY_ACTIVATE_DISTANCE + + if self._delayed_call and self._delayed_call.active(): + self._delayed_call.cancel() + self._delayed_call = self._clock.callLater(dt, self._schedule_next_block) + + def get_blocks_found(self) -> int: + return self._blocks_found diff --git a/hathor/simulator/simulator.py b/hathor/simulator/simulator.py index 912970125..6831c3bcf 100644 --- a/hathor/simulator/simulator.py +++ b/hathor/simulator/simulator.py @@ 
-20,19 +20,22 @@ from mnemonic import Mnemonic from structlog import get_logger +from hathor.builder import Builder +from hathor.conf import HathorSettings from hathor.daa import TestMode, _set_test_mode +from hathor.event.websocket import EventWebsocketFactory from hathor.manager import HathorManager from hathor.p2p.peer_id import PeerId from hathor.simulator.clock import HeapClock -from hathor.simulator.miner import MinerSimulator +from hathor.simulator.miner.geometric_miner import GeometricMiner from hathor.simulator.tx_generator import RandomTransactionGenerator from hathor.transaction.genesis import _get_genesis_transactions_unsafe -from hathor.transaction.storage.memory_storage import TransactionMemoryStorage from hathor.util import Random from hathor.wallet import HDWallet if TYPE_CHECKING: from hathor.simulator.fake_connection import FakeConnection + from hathor.simulator.trigger import Trigger logger = get_logger() @@ -59,6 +62,7 @@ def _apply_patches(cls): def verify_pow(self: BaseTransaction, *args: Any, **kwargs: Any) -> None: assert self.hash is not None + logger.new().debug('Skipping BaseTransaction.verify_pow() for simulator') cls._original_verify_pow = BaseTransaction.verify_pow BaseTransaction.verify_pow = verify_pow @@ -103,6 +107,7 @@ def __init__(self, seed: Optional[int] = None): seed = secrets.randbits(64) self.seed = seed self.rng = Random(self.seed) + self.settings = HathorSettings() self._network = 'testnet' self._clock = HeapClock() self._peers: OrderedDict[str, HathorManager] = OrderedDict() @@ -125,35 +130,40 @@ def stop(self) -> None: self._started = False self._patches_rc_decrement() - def create_peer(self, network: Optional[str] = None, peer_id: Optional[PeerId] = None, - enable_sync_v1: bool = True, enable_sync_v2: bool = True, - soft_voided_tx_ids: Optional[Set[bytes]] = None) -> HathorManager: - assert self._started - if network is None: - network = self._network + def create_peer( + self, + network: Optional[str] = None, + 
peer_id: Optional[PeerId] = None, + enable_sync_v1: bool = True, + enable_sync_v2: bool = True, + soft_voided_tx_ids: Optional[Set[bytes]] = None, + full_verification: bool = True, + event_ws_factory: Optional[EventWebsocketFactory] = None + ) -> HathorManager: + assert self._started, 'Simulator is not started.' + assert peer_id is not None # XXX: temporary, for checking that tests are using the peer_id wallet = HDWallet(gap_limit=2) wallet._manually_initialize() - assert peer_id is not None # XXX: temporary, for checking that tests are using the peer_id - if peer_id is None: - peer_id = PeerId() - tx_storage = TransactionMemoryStorage() - manager = HathorManager( - self._clock, - peer_id=peer_id, - network=network, - wallet=wallet, - enable_sync_v1=enable_sync_v1, - enable_sync_v2=enable_sync_v2, - tx_storage=tx_storage, - rng=Random(self.rng.getrandbits(64)), - soft_voided_tx_ids=soft_voided_tx_ids, - ) - - manager.reactor = self._clock - manager._full_verification = True - manager.start() + builder = Builder() \ + .set_reactor(self._clock) \ + .set_peer_id(peer_id or PeerId()) \ + .set_network(network or self._network) \ + .set_wallet(wallet) \ + .set_rng(Random(self.rng.getrandbits(64))) \ + .set_enable_sync_v1(enable_sync_v1) \ + .set_enable_sync_v2(enable_sync_v2) \ + .set_full_verification(full_verification) \ + .set_soft_voided_tx_ids(soft_voided_tx_ids or set()) \ + .use_memory() + + if event_ws_factory: + builder.enable_event_manager(event_ws_factory=event_ws_factory) + + artifacts = builder.build() + + artifacts.manager.start() self.run_to_completion() # Don't use it anywhere else. It is unsafe to generate mnemonic words like this. 
@@ -161,14 +171,15 @@ def create_peer(self, network: Optional[str] = None, peer_id: Optional[PeerId] = m = Mnemonic('english') words = m.to_mnemonic(self.rng.randbytes(32)) self.log.debug('randomized step: generate wallet', words=words) - wallet.unlock(words=words, tx_storage=manager.tx_storage) - return manager + wallet.unlock(words=words, tx_storage=artifacts.tx_storage) + + return artifacts.manager def create_tx_generator(self, peer: HathorManager, *args: Any, **kwargs: Any) -> RandomTransactionGenerator: return RandomTransactionGenerator(peer, self.rng, *args, **kwargs) - def create_miner(self, peer: HathorManager, *args: Any, **kwargs: Any) -> MinerSimulator: - return MinerSimulator(peer, self.rng, *args, **kwargs) + def create_miner(self, peer: HathorManager, *args: Any, **kwargs: Any) -> GeometricMiner: + return GeometricMiner(peer, self.rng, *args, **kwargs) def run_to_completion(self): """ This will advance the test's clock until all calls scheduled are done. @@ -184,6 +195,9 @@ def add_peer(self, name: str, peer: HathorManager) -> None: raise ValueError('Duplicate peer name') self._peers[name] = peer + def get_reactor(self) -> HeapClock: + return self._clock + def get_peer(self, name: str) -> HathorManager: return self._peers[name] @@ -240,7 +254,18 @@ def run_until_complete(self, def run(self, interval: float, step: float = DEFAULT_STEP_INTERVAL, - status_interval: float = DEFAULT_STATUS_INTERVAL) -> None: + status_interval: float = DEFAULT_STATUS_INTERVAL, + *, + trigger: Optional['Trigger'] = None) -> bool: + """Return True if it successfully ends the execution. + + If no trigger is provided, it always returns True. + If a trigger is provided, it returns True if the trigger stops the execution. Otherwise, it returns False. 
+ """ assert self._started for _ in self._run(interval, step, status_interval): - pass + if trigger is not None and trigger.should_stop(): + return True + if trigger is not None: + return False + return True diff --git a/hathor/simulator/trigger.py b/hathor/simulator/trigger.py new file mode 100644 index 000000000..32a758bd0 --- /dev/null +++ b/hathor/simulator/trigger.py @@ -0,0 +1,56 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from abc import ABC, abstractmethod +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from hathor.simulator.miner import AbstractMiner + from hathor.wallet import BaseWallet + + +class Trigger(ABC): + """Abstract class to stop simulation when a certain condition is satisfied.""" + @abstractmethod + def should_stop(self) -> bool: + """This method must return True when the stop condition is satisfied.""" + raise NotImplementedError + + +class StopAfterNMinedBlocks(Trigger): + """Stop the simulation after `miner` finds N blocks. 
Note that these blocks might be orphan.""" + def __init__(self, miner: 'AbstractMiner', *, quantity: int) -> None: + self.miner = miner + self.quantity = quantity + self.reset() + + def reset(self) -> None: + """Reset the counter, so this trigger can be reused.""" + self.initial_blocks_found = self.miner.get_blocks_found() + + def should_stop(self) -> bool: + diff = self.miner.get_blocks_found() - self.initial_blocks_found + return diff >= self.quantity + + +class StopAfterMinimumBalance(Trigger): + """Stop the simulation after `wallet` reaches a minimum unlocked balance.""" + def __init__(self, wallet: 'BaseWallet', token_uid: bytes, minimum_balance: int) -> None: + self.wallet = wallet + self.token_uid = token_uid + self.minimum_balance = minimum_balance + + def should_stop(self) -> bool: + balance = self.wallet.balance[self.token_uid].available + return balance >= self.minimum_balance diff --git a/hathor/stratum/stratum.py b/hathor/stratum/stratum.py index e44e3dc04..f50992d6f 100644 --- a/hathor/stratum/stratum.py +++ b/hathor/stratum/stratum.py @@ -117,11 +117,14 @@ def __init__(self, jobid: UUID, created: int, miner: UUID, tx: BaseTransaction, class MinerJob(NamedTuple): """ Data class used to share job data between mining processes """ - data: 'ctypes.Array[ctypes.c_ubyte]' = Array('B', 2048) - data_size: 'ctypes.c_uint' = Value('I') - job_id: 'ctypes.Array[ctypes.c_ubyte]' = Array('B', 16) - nonce_size: 'ctypes.c_uint' = Value('I') - weight: 'ctypes.c_double' = Value('d') + # XXX: these typings are causing too much trouble, since this module hasn't been touched for a while and won't be + # touched for the foreseeable future (and will possibly even removed before any changes) it seems fine to just + # use Any so there aren't any mypy complaints anymore + data: Any = Array('B', 2048) + data_size: Any = Value('I') + job_id: Any = Array('B', 16) + nonce_size: Any = Value('I') + weight: Any = Value('d') def update_job(self, params: Dict[str, Any]) -> bool: """ 
@@ -818,7 +821,7 @@ class StratumClient(JSONRPC): job: Dict miners: List[Process] loop: Optional[task.LoopingCall] - signal: 'ctypes.c_ubyte' + signal: Any job_data: MinerJob address: Optional[bytes] diff --git a/hathor/sysctl/__init__.py b/hathor/sysctl/__init__.py new file mode 100644 index 000000000..34525e51c --- /dev/null +++ b/hathor/sysctl/__init__.py @@ -0,0 +1,21 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from hathor.sysctl.p2p.manager import ConnectionsManagerSysctl +from hathor.sysctl.sysctl import Sysctl + +__all__ = [ + 'Sysctl', + 'ConnectionsManagerSysctl', +] diff --git a/hathor/sysctl/exception.py b/hathor/sysctl/exception.py new file mode 100644 index 000000000..2fb05f862 --- /dev/null +++ b/hathor/sysctl/exception.py @@ -0,0 +1,28 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +class SysctlException(Exception): + pass + + +class SysctlEntryNotFound(SysctlException): + pass + + +class SysctlReadOnlyEntry(SysctlException): + pass + + +class SysctlWriteOnlyEntry(SysctlException): + pass diff --git a/hathor/sysctl/factory.py b/hathor/sysctl/factory.py new file mode 100644 index 000000000..b9b1101e7 --- /dev/null +++ b/hathor/sysctl/factory.py @@ -0,0 +1,30 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import TYPE_CHECKING + +from twisted.internet.protocol import Factory + +from hathor.sysctl.protocol import SysctlProtocol + +if TYPE_CHECKING: + from hathor.sysctl.sysctl import Sysctl + + +class SysctlFactory(Factory): + def __init__(self, root: 'Sysctl') -> None: + self.root = root + + def buildProtocol(self, addr): + return SysctlProtocol(self.root) diff --git a/hathor/sysctl/p2p/__init__.py b/hathor/sysctl/p2p/__init__.py new file mode 100644 index 000000000..caba4a1be --- /dev/null +++ b/hathor/sysctl/p2p/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/hathor/sysctl/p2p/manager.py b/hathor/sysctl/p2p/manager.py new file mode 100644 index 000000000..3fc4779c4 --- /dev/null +++ b/hathor/sysctl/p2p/manager.py @@ -0,0 +1,147 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
import os
from typing import List, Tuple

from hathor.p2p.manager import ConnectionsManager
from hathor.sysctl.exception import SysctlException
from hathor.sysctl.sysctl import Sysctl


def parse_text(text: str) -> List[str]:
    """Parse text per line, skipping blank lines and '#' comments."""
    stripped = (line.strip() for line in text.splitlines())
    return [line for line in stripped if line and not line.startswith('#')]


class ConnectionsManagerSysctl(Sysctl):
    """Sysctl subtree exposing runtime knobs of a ConnectionsManager."""

    def __init__(self, connections: ConnectionsManager) -> None:
        super().__init__()
        self.connections = connections
        # (path, getter, setter) triples; a None getter/setter makes the entry
        # write-only/read-only respectively.
        entries = (
            ('max_enabled_sync', self.get_max_enabled_sync, self.set_max_enabled_sync),
            ('rate_limit.global.send_tips', self.get_global_send_tips_rate_limit,
             self.set_global_send_tips_rate_limit),
            ('sync_update_interval', self.get_lc_sync_update_interval, self.set_lc_sync_update_interval),
            ('force_sync_rotate', None, self.set_force_sync_rotate),
            ('always_enable_sync', self.get_always_enable_sync, self.set_always_enable_sync),
            ('always_enable_sync.readtxt', None, self.set_always_enable_sync_readtxt),
        )
        for path, getter, setter in entries:
            self.register(path, getter, setter)

    def set_force_sync_rotate(self) -> None:
        """Force a sync rotate."""
        self.connections._sync_rotate_if_needed(force=True)

    def get_global_send_tips_rate_limit(self) -> Tuple[int, float]:
        """Return (max_hits, window_seconds) of the global SEND_TIPS rate limiter, or (0, 0) if unset."""
        limit = self.connections.rate_limiter.get_limit(self.connections.GlobalRateLimiter.SEND_TIPS)
        if limit is None:
            return (0, 0)
        return (limit.max_hits, limit.window_seconds)

    def set_global_send_tips_rate_limit(self, max_hits: int, window_seconds: float) -> None:
        """Change the global SEND_TIPS rate limiter.

        The rate limiter is disabled when `window_seconds == 0`."""
        if window_seconds == 0:
            self.connections.disable_rate_limiter()
            return
        if max_hits < 0:
            raise SysctlException('max_hits must be >= 0')
        if window_seconds < 0:
            raise SysctlException('window_seconds must be >= 0')
        self.connections.enable_rate_limiter(max_hits, window_seconds)

    def get_lc_sync_update_interval(self) -> float:
        """Return the interval to rotate sync (in seconds)."""
        return self.connections.lc_sync_update_interval

    def set_lc_sync_update_interval(self, value: float) -> None:
        """Change the interval to rotate sync (in seconds), restarting the LoopingCall if running."""
        if value <= 0:
            raise SysctlException('value must be > 0')
        self.connections.lc_sync_update_interval = value
        if self.connections.lc_sync_update.running:
            self.connections.lc_sync_update.stop()
            self.connections.lc_sync_update.start(self.connections.lc_sync_update_interval, now=False)

    def get_always_enable_sync(self) -> List[str]:
        """Return the list of sync-always-enabled peers."""
        return list(self.connections.always_enable_sync)

    def set_always_enable_sync(self, values: List[str]) -> None:
        """Replace the list of sync-always-enabled peers."""
        self.connections.set_always_enable_sync(values)

    def set_always_enable_sync_readtxt(self, file_path: str) -> None:
        """Load the sync-always-enabled peer list from a text file (one peer per line)."""
        if not os.path.isfile(file_path):
            raise SysctlException(f'file not found: {file_path}')
        with open(file_path, 'r') as fp:
            self.connections.set_always_enable_sync(parse_text(fp.read()))

    def get_max_enabled_sync(self) -> int:
        """Return the maximum number of peers running sync simultaneously."""
        return self.connections.MAX_ENABLED_SYNC

    def set_max_enabled_sync(self, value: int) -> None:
        """Change the maximum number of peers running sync simultaneously; rotates sync on change."""
        if value < 0:
            raise SysctlException('value must be >= 0')
        if value == self.connections.MAX_ENABLED_SYNC:
            return
        self.connections.MAX_ENABLED_SYNC = value
        self.connections._sync_rotate_if_needed(force=True)
import json
from typing import TYPE_CHECKING, Any

from pydantic import ValidationError
from twisted.protocols.basic import LineReceiver

from hathor.sysctl.exception import SysctlEntryNotFound, SysctlException, SysctlReadOnlyEntry, SysctlWriteOnlyEntry

if TYPE_CHECKING:
    from hathor.sysctl.sysctl import Sysctl


class SysctlProtocol(LineReceiver):
    """Line-oriented protocol for reading/writing sysctl entries.

    Commands (one utf-8 line each):
      - `path`          reply with the JSON-serialized value of `path`
      - `path = value`  set `path` to `value` (JSON; comma-separated values form a tuple)
      - `!backup`       dump every readable entry as `path=value` lines
    """
    delimiter = b'\n'

    def __init__(self, root: 'Sysctl') -> None:
        self.root = root

    def lineReceived(self, raw: bytes) -> None:
        """Parse one command line and dispatch to backup/get/set."""
        try:
            line = raw.decode('utf-8').strip()
        except UnicodeDecodeError:
            self.sendError('command is not utf-8 valid')
            # XXX: bugfix — without this return, `line` is unbound below and the
            # handler would crash with UnboundLocalError right after reporting the error
            return
        if line == '!backup':
            self.backup()
            return
        head, separator, tail = line.partition('=')
        head = head.strip()
        tail = tail.strip()
        if separator == '=':
            self.set(head, tail)
        else:
            self.get(head)

    def sendError(self, msg: str) -> None:
        """Send an error message to the client. Used when a command fails."""
        self.sendLine(f'[error] {msg}'.encode('utf-8'))

    def set(self, path: str, value_str: str) -> None:
        """Run a `set` command in sysctl, reporting any failure to the client."""
        try:
            value = self._deserialize(value_str)
        except json.JSONDecodeError:
            self.sendError('value: wrong format')
            return

        try:
            self.root.set(path, value)
        except SysctlEntryNotFound:
            self.sendError(f'{path} not found')
        except SysctlReadOnlyEntry:
            self.sendError(f'cannot write to {path}')
        except SysctlException as e:
            self.sendError(str(e))
        except (ValidationError, TypeError) as e:
            # raised by the pydantic-validated setter when the arguments don't match
            self.sendError(str(e))

    def get(self, path: str) -> None:
        """Run a `get` command in sysctl and send the serialized value back."""
        try:
            value = self.root.get(path)
        except SysctlEntryNotFound:
            self.sendError(f'{path} not found')
        except SysctlWriteOnlyEntry:
            self.sendError(f'cannot read from {path}')
        else:
            output = self._serialize(value)
            self.sendLine(output.encode('utf-8'))

    def backup(self) -> None:
        """Run a `backup` command, sending all parameters to the client."""
        for key, value in self.root.get_all():
            output = f'{key}={self._serialize(value)}'
            self.sendLine(output.encode('utf-8'))

    def _serialize(self, value: Any) -> str:
        """Serialize the return of a sysctl getter (tuples become comma-separated JSON)."""
        if isinstance(value, tuple):
            return ', '.join(json.dumps(x) for x in value)
        return json.dumps(value)

    def _deserialize(self, value_str: str) -> Any:
        """Deserialize a value sent by the client (inverse of _serialize)."""
        if len(value_str) == 0:
            return ()
        parts = [x.strip() for x in value_str.split(',')]
        if len(parts) > 1:
            return tuple(json.loads(x) for x in parts)
        return json.loads(value_str)
Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Any, Callable, Dict, Iterator, NamedTuple, Optional, Tuple + +from pydantic import validate_arguments + +from hathor.sysctl.exception import SysctlEntryNotFound, SysctlReadOnlyEntry, SysctlWriteOnlyEntry + +Getter = Callable[[], Any] +Setter = Callable[..., None] + + +class SysctlCommand(NamedTuple): + getter: Optional[Getter] + setter: Optional[Setter] + + +class Sysctl: + """A node in the sysctl tree.""" + + def __init__(self) -> None: + self._children: Dict[str, 'Sysctl'] = {} + self._commands: Dict[str, SysctlCommand] = {} + + def put_child(self, path: str, sysctl: 'Sysctl') -> None: + """Add a child to the tree.""" + assert path not in self._children + self._children[path] = sysctl + + def register(self, path: str, getter: Optional[Getter], setter: Optional[Setter]) -> None: + """Register a new parameter for sysctl.""" + assert path not in self._commands + if setter is not None: + setter = validate_arguments(setter) + self._commands[path] = SysctlCommand( + getter=getter, + setter=setter, + ) + + def get_command(self, path: str) -> SysctlCommand: + """Find and return the sysctl of the provided path.""" + if path in self._commands: + return self._commands[path] + for key, child in self._children.items(): + if not path.startswith(f'{key}.'): + continue + tail = path[len(key) + 1:] + return child.get_command(tail) + raise SysctlEntryNotFound(path) + + def _get_getter(self, path: str) -> Getter: + """Return the getter method of a path.""" 
+ cmd = self.get_command(path) + if cmd.getter is None: + raise SysctlWriteOnlyEntry(path) + return cmd.getter + + def _get_setter(self, path: str) -> Setter: + """Return the setter method of a path.""" + cmd = self.get_command(path) + if cmd.setter is None: + raise SysctlReadOnlyEntry(path) + return cmd.setter + + def get(self, path: str) -> Any: + """Run a get in sysctl.""" + getter = self._get_getter(path) + return getter() + + def set(self, path: str, value: Any) -> None: + """Run a set in sysctl.""" + setter = self._get_setter(path) + if isinstance(value, tuple): + setter(*value) + else: + setter(value) + + def path_join(self, p1: str, p2: str) -> str: + """Util to join two paths.""" + if not p1: + return p2 + return f'{p1}.{p2}' + + def get_all(self, prefix: str = '') -> Iterator[Tuple[str, Any]]: + """Return all paths and values, usually for backup.""" + for path, child in self._children.items(): + yield from child.get_all(self.path_join(prefix, path)) + for path, cmd in self._commands.items(): + if cmd.getter is None: + continue + value = cmd.getter() + yield (self.path_join(prefix, path), value) diff --git a/hathor/transaction/base_transaction.py b/hathor/transaction/base_transaction.py index ce728102f..e93defc3b 100644 --- a/hathor/transaction/base_transaction.py +++ b/hathor/transaction/base_transaction.py @@ -1185,7 +1185,7 @@ def __repr__(self) -> str: def __str__(self) -> str: cls_name = type(self).__name__ - value_str = hex(self.value) if self.is_token_authority else str(self.value) + value_str = hex(self.value) if self.is_token_authority() else str(self.value) if self.token_data: return f'{cls_name}(token_data={bin(self.token_data)}, value={value_str}, script={self.script.hex()})' else: diff --git a/hathor/transaction/resources/transaction_confirmation.py b/hathor/transaction/resources/transaction_confirmation.py index 06f1bd086..bc74f42d2 100644 --- a/hathor/transaction/resources/transaction_confirmation.py +++ 
b/hathor/transaction/resources/transaction_confirmation.py @@ -32,7 +32,7 @@ def __init__(self, manager): # Important to have the manager so we can know the tx_storage self.manager = manager - def _render_GET_data(self, requested_hash): + def _render_GET_data(self, requested_hash: str) -> Dict[str, Any]: success, message = validate_tx_hash(requested_hash, self.manager.tx_storage) if not success: return {'success': False, 'message': message} diff --git a/hathor/transaction/storage/cache_storage.py b/hathor/transaction/storage/cache_storage.py index 56d37b55b..f662b22f5 100644 --- a/hathor/transaction/storage/cache_storage.py +++ b/hathor/transaction/storage/cache_storage.py @@ -50,7 +50,7 @@ def __init__(self, store: 'BaseTransactionStorage', reactor: Reactor, interval: transaction/blocks/metadata when returning those objects. :type _clone_if_needed: bool """ - if store.with_index: + if store.indexes is not None: raise ValueError('internal storage cannot have indexes enabled') store.remove_cache() diff --git a/hathor/transaction/storage/memory_storage.py b/hathor/transaction/storage/memory_storage.py index c363ce188..8d57eb1ae 100644 --- a/hathor/transaction/storage/memory_storage.py +++ b/hathor/transaction/storage/memory_storage.py @@ -89,7 +89,7 @@ def _get_transaction(self, hash_bytes: bytes) -> BaseTransaction: else: raise TransactionDoesNotExist(hash_bytes.hex()) - def get_all_transactions(self) -> Iterator[BaseTransaction]: + def get_all_transactions(self, *, include_partial: bool = False) -> Iterator[BaseTransaction]: for tx in self.transactions.values(): tx = self._clone(tx) if tx.hash in self.metadata: diff --git a/hathor/transaction/storage/rocksdb_storage.py b/hathor/transaction/storage/rocksdb_storage.py index c6e48feae..1a03df316 100644 --- a/hathor/transaction/storage/rocksdb_storage.py +++ b/hathor/transaction/storage/rocksdb_storage.py @@ -146,7 +146,7 @@ def _get_tx(self, hash_bytes: bytes, tx_data: bytes) -> 'BaseTransaction': 
self._save_to_weakref(tx) return tx - def get_all_transactions(self) -> Iterator['BaseTransaction']: + def get_all_transactions(self, *, include_partial: bool = False) -> Iterator['BaseTransaction']: tx: Optional['BaseTransaction'] items = self._db.iteritems(self._cf_tx) @@ -163,6 +163,10 @@ def get_all_transactions(self) -> Iterator['BaseTransaction']: tx = self._get_tx(hash_bytes, tx_data) assert tx is not None + if not include_partial: + assert tx._metadata is not None + if not tx._metadata.validation.is_fully_connected(): + continue yield tx def is_empty(self) -> bool: diff --git a/hathor/transaction/storage/transaction_storage.py b/hathor/transaction/storage/transaction_storage.py index f72c7cb19..d74bd5ae7 100644 --- a/hathor/transaction/storage/transaction_storage.py +++ b/hathor/transaction/storage/transaction_storage.py @@ -24,6 +24,7 @@ from hathor.conf import HathorSettings from hathor.indexes import IndexesManager, MemoryIndexesManager +from hathor.profiler import get_cpu_profiler from hathor.pubsub import PubSubManager from hathor.transaction.base_transaction import BaseTransaction from hathor.transaction.block import Block @@ -34,6 +35,7 @@ from hathor.util import not_none settings = HathorSettings() +cpu = get_cpu_profiler() # these are the timestamp values to be used when resetting them, 1 is used for the node instead of 0, so it can be # greater, that way if both are reset (which also happens on a database that never run this implementation before) we @@ -54,7 +56,6 @@ class TransactionStorage(ABC): """Legacy sync interface, please copy @deprecated decorator when implementing methods.""" pubsub: Optional[PubSubManager] - with_index: bool indexes: Optional[IndexesManager] log = get_logger() @@ -78,7 +79,7 @@ class TransactionStorage(ABC): _migrations: List[BaseMigration] - def __init__(self): + def __init__(self) -> None: # Weakref is used to guarantee that there is only one instance of each transaction in memory. 
self._tx_weakref: WeakValueDictionary[bytes, BaseTransaction] = WeakValueDictionary() self._tx_weakref_disabled: bool = False @@ -95,7 +96,7 @@ def __init__(self): # Cache for the best block tips # This cache is updated in the consensus algorithm. - self._best_block_tips_cache = None + self._best_block_tips_cache: Optional[List[bytes]] = None # If should create lock when getting a transaction self._should_lock = False @@ -161,7 +162,7 @@ def get_migration_state(self, migration_name: str) -> MigrationState: def set_migration_state(self, migration_name: str, state: MigrationState) -> None: raise NotImplementedError - def _check_and_apply_migrations(self): + def _check_and_apply_migrations(self) -> None: """Check which migrations have not been run yet and apply them in order.""" from hathor.transaction.storage.exceptions import OutOfOrderMigrationError, PartialMigrationError db_is_empty = self.is_empty() @@ -361,7 +362,7 @@ def remove_transaction(self, tx: BaseTransaction) -> None: :param tx: Trasaction to be removed """ - if self.with_index: + if self.indexes is not None: self.del_from_indexes(tx, remove_all=True, relax_assert=True) def remove_transactions(self, txs: List[BaseTransaction]) -> None: @@ -481,7 +482,7 @@ def get_metadata(self, hash_bytes: bytes) -> Optional[TransactionMetadata]: return None @abstractmethod - def get_all_transactions(self) -> Iterator[BaseTransaction]: + def get_all_transactions(self, *, include_partial: bool = False) -> Iterator[BaseTransaction]: # TODO: verify the following claim: """Return all transactions that are not blocks. """ @@ -553,6 +554,7 @@ def get_height_best_block(self) -> int: return highest_height + @cpu.profiler('get_merkle_tree') def get_merkle_tree(self, timestamp: int) -> Tuple[bytes, List[bytes]]: """ Generate a hash to check whether the DAG is the same at that timestamp. 
@@ -942,6 +944,8 @@ def get_transactions_that_became_invalid(self) -> List[BaseTransaction]: class BaseTransactionStorage(TransactionStorage): + indexes: Optional[IndexesManager] + def __init__(self, with_index: bool = True, pubsub: Optional[Any] = None) -> None: super().__init__() @@ -949,9 +953,10 @@ def __init__(self, with_index: bool = True, pubsub: Optional[Any] = None) -> Non self.pubsub = pubsub # Initialize index if needed. - self.with_index = with_index if with_index: self.indexes = self._build_indexes_manager() + else: + self.indexes = None # Either save or verify all genesis. self._save_or_verify_genesis() @@ -975,13 +980,11 @@ def _build_indexes_manager(self) -> IndexesManager: def reset_indexes(self) -> None: """Reset all indexes. This function should not be called unless you know what you are doing.""" - assert self.with_index, 'Cannot reset indexes because they have not been enabled.' - assert self.indexes is not None + assert self.indexes is not None, 'Cannot reset indexes because they have not been enabled.' 
self.indexes.force_clear_all() def remove_cache(self) -> None: """Remove all caches in case we don't need it.""" - self.with_index = False self.indexes = None def get_best_block_tips(self, timestamp: Optional[float] = None, *, skip_cache: bool = False) -> List[bytes]: @@ -991,7 +994,7 @@ def get_weight_best_block(self) -> float: return super().get_weight_best_block() def get_block_tips(self, timestamp: Optional[float] = None) -> Set[Interval]: - if not self.with_index: + if self.indexes is None: raise NotImplementedError assert self.indexes is not None if timestamp is None: @@ -999,7 +1002,7 @@ def get_block_tips(self, timestamp: Optional[float] = None) -> Set[Interval]: return self.indexes.block_tips[timestamp] def get_tx_tips(self, timestamp: Optional[float] = None) -> Set[Interval]: - if not self.with_index: + if self.indexes is None: raise NotImplementedError assert self.indexes is not None if timestamp is None: @@ -1016,7 +1019,7 @@ def get_tx_tips(self, timestamp: Optional[float] = None) -> Set[Interval]: return tips def get_all_tips(self, timestamp: Optional[float] = None) -> Set[Interval]: - if not self.with_index: + if self.indexes is None: raise NotImplementedError assert self.indexes is not None if timestamp is None: @@ -1034,7 +1037,7 @@ def get_all_tips(self, timestamp: Optional[float] = None) -> Set[Interval]: return tips def get_newest_blocks(self, count: int) -> Tuple[List[Block], bool]: - if not self.with_index: + if self.indexes is None: raise NotImplementedError assert self.indexes is not None block_hashes, has_more = self.indexes.sorted_blocks.get_newest(count) @@ -1042,7 +1045,7 @@ def get_newest_blocks(self, count: int) -> Tuple[List[Block], bool]: return blocks, has_more def get_newest_txs(self, count: int) -> Tuple[List[BaseTransaction], bool]: - if not self.with_index: + if self.indexes is None: raise NotImplementedError assert self.indexes is not None tx_hashes, has_more = self.indexes.sorted_txs.get_newest(count) @@ -1050,7 +1053,7 @@ def 
get_newest_txs(self, count: int) -> Tuple[List[BaseTransaction], bool]: return txs, has_more def get_older_blocks_after(self, timestamp: int, hash_bytes: bytes, count: int) -> Tuple[List[Block], bool]: - if not self.with_index: + if self.indexes is None: raise NotImplementedError assert self.indexes is not None block_hashes, has_more = self.indexes.sorted_blocks.get_older(timestamp, hash_bytes, count) @@ -1059,7 +1062,7 @@ def get_older_blocks_after(self, timestamp: int, hash_bytes: bytes, count: int) def get_newer_blocks_after(self, timestamp: int, hash_bytes: bytes, count: int) -> Tuple[List[BaseTransaction], bool]: - if not self.with_index: + if self.indexes is None: raise NotImplementedError assert self.indexes is not None block_hashes, has_more = self.indexes.sorted_blocks.get_newer(timestamp, hash_bytes, count) @@ -1067,7 +1070,7 @@ def get_newer_blocks_after(self, timestamp: int, hash_bytes: bytes, return blocks, has_more def get_older_txs_after(self, timestamp: int, hash_bytes: bytes, count: int) -> Tuple[List[BaseTransaction], bool]: - if not self.with_index: + if self.indexes is None: raise NotImplementedError assert self.indexes is not None tx_hashes, has_more = self.indexes.sorted_txs.get_older(timestamp, hash_bytes, count) @@ -1075,7 +1078,7 @@ def get_older_txs_after(self, timestamp: int, hash_bytes: bytes, count: int) -> return txs, has_more def get_newer_txs_after(self, timestamp: int, hash_bytes: bytes, count: int) -> Tuple[List[BaseTransaction], bool]: - if not self.with_index: + if self.indexes is None: raise NotImplementedError assert self.indexes is not None tx_hashes, has_more = self.indexes.sorted_txs.get_newer(timestamp, hash_bytes, count) @@ -1112,7 +1115,7 @@ def _topological_sort_timestamp_index(self) -> Iterator[BaseTransaction]: yield from cur_blocks yield from cur_txs - def _topological_sort_metadata(self) -> Iterator[BaseTransaction]: + def _topological_sort_metadata(self, *, include_partial: bool = False) -> 
Iterator[BaseTransaction]: import heapq from dataclasses import dataclass, field @@ -1203,7 +1206,7 @@ def _run_topological_sort_dfs(self, root: BaseTransaction, visited: Dict[bytes, stack.append(txinput) def add_to_indexes(self, tx: BaseTransaction) -> None: - if not self.with_index: + if self.indexes is None: if self._saving_genesis: # XXX: avoid failing on some situations where this is called before we know it's OK to skip # see: https://github.com/HathorNetwork/hathor-core/pull/436 @@ -1215,25 +1218,25 @@ def add_to_indexes(self, tx: BaseTransaction) -> None: self.indexes.add_tx(tx) def del_from_indexes(self, tx: BaseTransaction, *, remove_all: bool = False, relax_assert: bool = False) -> None: - if not self.with_index: + if self.indexes is None: raise NotImplementedError assert self.indexes is not None self.indexes.del_tx(tx, remove_all=remove_all, relax_assert=relax_assert) def get_block_count(self) -> int: - if not self.with_index: + if self.indexes is None: raise NotImplementedError assert self.indexes is not None return self.indexes.info.get_block_count() def get_tx_count(self) -> int: - if not self.with_index: + if self.indexes is None: raise NotImplementedError assert self.indexes is not None return self.indexes.info.get_tx_count() def get_vertices_count(self) -> int: - if not self.with_index: + if self.indexes is None: raise NotImplementedError assert self.indexes is not None return self.indexes.info.get_vertices_count() diff --git a/hathor/util.py b/hathor/util.py index 1389927ad..e5dcf85d7 100644 --- a/hathor/util.py +++ b/hathor/util.py @@ -446,8 +446,7 @@ def progress(iter_tx: Iterator['BaseTransaction'], *, log: Optional['structlog.s if log is None: log = logger.new() - with manualgc(): - yield from _progress(iter_tx, log=log, total=total) + yield from _progress(iter_tx, log=log, total=total) def _progress(iter_tx: Iterator['BaseTransaction'], *, log: 'structlog.stdlib.BoundLogger', total: Optional[int] @@ -495,11 +494,6 @@ def _progress(iter_tx: 
Iterator['BaseTransaction'], *, log: 'structlog.stdlib.Bo log.info(f'loading... {math.floor(progress * 100):2.0f}%', progress=progress, **kwargs) else: log.info('loading...', **kwargs) - # XXX: this collections will happen every _DT_LOG_PROGRESS (=30s) on average, which is good, and because - # automatic collection should be disabled, it won't happen during processing of transactions, which - # can make it seem like a transaction took more time to be processed when has nothing to do with the - # transaction itself - gc.collect() count_log_prev = count count += 1 @@ -515,10 +509,10 @@ def _progress(iter_tx: Iterator['BaseTransaction'], *, log: 'structlog.stdlib.Bo dt_yield = t_after_yield - t_before_yield if dt_yield > _DT_YIELD_WARN: dt = LogDuration(dt_yield) - log.warn('tx took too long to be processed', tx=tx.hash_hex, dt=dt) - - # one final collection before finishing the loading process - gc.collect() + # The loglevel was changed to debug because most of the causes of slowness + # is related to the gc acting during the tx processing. We had previously + # disabled the gc but it caused a too high CPU usage. + log.debug('tx took too long to be processed (gc?!)', tx=tx.hash_hex, dt=dt) t_final = time.time() dt_total = LogDuration(t_final - t_start) diff --git a/hathor/utils/__init__.py b/hathor/utils/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/hathor/utils/api.py b/hathor/utils/api.py new file mode 100644 index 000000000..f614cdab3 --- /dev/null +++ b/hathor/utils/api.py @@ -0,0 +1,60 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import cgi +from typing import Union + +from pydantic import Field, ValidationError, validator +from twisted.web.http import Request + +from hathor.api_util import get_args +from hathor.utils.list import single_or_none +from hathor.utils.pydantic import BaseModel + + +class QueryParams(BaseModel): + """Class used to parse Twisted HTTP Request query parameters. + + Subclass this class defining your query parameters as attributes and their respective types, then call the + from_request() class method to instantiate your class from the provided request. + """ + _list_to_single_item_validator = validator('*', pre=True, allow_reuse=True)(single_or_none) + + @classmethod + def from_request(cls, request: Request) -> Union['QueryParams', 'ErrorResponse']: + """Creates an instance from a Twisted Request.""" + encoding = 'utf8' + + if content_type_header := request.requestHeaders.getRawHeaders('content-type'): + _, options = cgi.parse_header(content_type_header[0]) + encoding = options.get('charset', encoding) + + raw_args = get_args(request).items() + args = { + key.decode(encoding): [value.decode(encoding) for value in values] + for key, values in raw_args + } + + try: + return cls.parse_obj(args) + except ValidationError as error: + return ErrorResponse(error=str(error)) + + +class Response(BaseModel): + pass + + +class ErrorResponse(Response): + success: bool = Field(default=False, const=True) + error: str diff --git a/hathor/utils/list.py b/hathor/utils/list.py new file mode 100644 index 000000000..37c60ffe2 --- /dev/null +++ b/hathor/utils/list.py @@ -0,0 
+1,33 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import List, Optional, TypeVar + +T = TypeVar('T') + + +def single_or_none(_list: List[T]) -> Optional[T]: + """Function to convert a list with at most one element to the given element or None. + >>> single_or_none([]) is None + True + >>> single_or_none([1]) + 1 + >>> single_or_none([1, 2]) + Traceback (most recent call last): + ... + AssertionError: expected one value at most + """ + assert len(_list) <= 1, 'expected one value at most' + + return None if not len(_list) else _list[0] diff --git a/hathor/utils/pydantic.py b/hathor/utils/pydantic.py new file mode 100644 index 000000000..6b5be9fa2 --- /dev/null +++ b/hathor/utils/pydantic.py @@ -0,0 +1,45 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from pydantic import BaseModel as PydanticBaseModel, Extra +from pydantic.generics import GenericModel as PydanticGenericModel + +from hathor.util import json_dumpb + + +class BaseModel(PydanticBaseModel): + """Substitute for pydantic's BaseModel. + This class defines a project BaseModel to be used instead of pydantic's, setting stricter global configurations. + Other configurations can be set on a case by case basis. + + Read: https://docs.pydantic.dev/usage/model_config/#change-behaviour-globally + """ + + def json_dumpb(self) -> bytes: + """Utility method for converting a Model into bytes representation of a JSON.""" + return json_dumpb(self.dict()) + + class Config: + allow_mutation = False + extra = Extra.forbid + + +class GenericModel(BaseModel, PydanticGenericModel): + """Substitute for pydantic's GenericModel. + This class defines a project GenericModel to be used instead of pydantic's, setting stricter global configurations. + Other configurations can be set on a case by case basis. + + Read: https://docs.pydantic.dev/usage/model_config/#change-behaviour-globally + """ + pass diff --git a/hathor/version.py b/hathor/version.py index 7e4b33f8d..e7e15ffaa 100644 --- a/hathor/version.py +++ b/hathor/version.py @@ -12,4 +12,70 @@ # See the License for the specific language governing permissions and # limitations under the License. -__version__ = '0.52.3' +import os +import re +import subprocess +from typing import Optional + +from structlog import get_logger + +BASE_VERSION = '0.53.0' + +DEFAULT_VERSION_SUFFIX = "local" +BUILD_VERSION_FILE_PATH = "./BUILD_VERSION" + +# Valid formats: 1.2.3, 1.2.3-rc.1 and nightly-ab49c20f +BUILD_VERSION_REGEX = r"^(\d+\.\d+\.\d+(-rc\.\d+)?|nightly-[a-f0-9]{7,8})$" + + +logger = get_logger() + + +def _get_build_version() -> Optional[str]: + """Try to get the build version from BUILD_VERSION_FILE_PATH and validate it. + + :return: The build version or None, if there is no file or the version is invalid. 
+ """ + if not os.path.isfile(BUILD_VERSION_FILE_PATH): + return None + + with open(BUILD_VERSION_FILE_PATH, 'r') as f: + build_version = f.readline().strip() + match = re.match(BUILD_VERSION_REGEX, build_version) + + if match: + return build_version + else: + logger.warn("A build version with an invalid format was found. Ignoring it.", build_version=build_version) + return None + + +def _get_git_revision_short_hash() -> Optional[str]: + try: + return subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD']).decode('ascii').strip() + except (subprocess.CalledProcessError, FileNotFoundError): + logger.warn(( + "Error while trying to get local git head. There is either no git available or we aren't in a git repo. " + "Will report local version without any git info." + )) + return None + + +def _get_local_version() -> str: + git_head = _get_git_revision_short_hash() + + if git_head: + return f"{BASE_VERSION}-{git_head}-{DEFAULT_VERSION_SUFFIX}" + + return f"{BASE_VERSION}-{DEFAULT_VERSION_SUFFIX}" + + +def _get_version() -> str: + """Get the current hathor-core version from the build version or the default one with a local suffix + + :return: The current hathor-core version + """ + return _get_build_version() or _get_local_version() + + +__version__ = _get_version() diff --git a/hathor/wallet/base_wallet.py b/hathor/wallet/base_wallet.py index efdbd2a18..32e7b8054 100644 --- a/hathor/wallet/base_wallet.py +++ b/hathor/wallet/base_wallet.py @@ -516,27 +516,28 @@ def on_new_tx(self, tx: BaseTransaction) -> None: # Nothing to do! 
return - updated = False + should_update = False # check outputs for index, output in enumerate(tx.outputs): script_type_out = parse_address_script(output.script) - if script_type_out: - if script_type_out.address in self.keys: - self.log.debug('detected tx output', tx=tx.hash_hex, index=index, address=script_type_out.address) - token_id = tx.get_token_uid(output.get_token_index()) - # this wallet received tokens - utxo = UnspentTx(tx.hash, index, output.value, tx.timestamp, script_type_out.address, - output.token_data, timelock=script_type_out.timelock) - self.unspent_txs[token_id][(tx.hash, index)] = utxo - # mark key as used - self.tokens_received(script_type_out.address) - updated = True - # publish new output and new balance - self.publish_update(HathorEvents.WALLET_OUTPUT_RECEIVED, total=self.get_total_tx(), output=utxo) - else: + if not script_type_out: # it's the only one we know, so log warning self.log.warn('unknown script') + continue + if script_type_out.address not in self.keys: + continue + self.log.debug('detected tx output', tx=tx.hash_hex, index=index, address=script_type_out.address) + token_id = tx.get_token_uid(output.get_token_index()) + # this wallet received tokens + utxo = UnspentTx(tx.hash, index, output.value, tx.timestamp, script_type_out.address, + output.token_data, timelock=script_type_out.timelock) + self.unspent_txs[token_id][(tx.hash, index)] = utxo + # mark key as used + self.tokens_received(script_type_out.address) + should_update = True + # publish new output and new balance + self.publish_update(HathorEvents.WALLET_OUTPUT_RECEIVED, total=self.get_total_tx(), output=utxo) # check inputs for _input in tx.inputs: @@ -546,33 +547,34 @@ def on_new_tx(self, tx: BaseTransaction) -> None: token_id = output_tx.get_token_uid(output.get_token_index()) script_type_out = parse_address_script(output.script) - if script_type_out: - if script_type_out.address in self.keys: - # this wallet spent tokens - # remove from unspent_txs - key = 
(_input.tx_id, _input.index) - old_utxo = self.unspent_txs[token_id].pop(key, None) - if old_utxo is None: - old_utxo = self.maybe_spent_txs[token_id].pop(key, None) - if old_utxo: - # add to spent_txs - spent = SpentTx(tx.hash, _input.tx_id, _input.index, old_utxo.value, tx.timestamp) - self.spent_txs[key].append(spent) - updated = True - # publish spent output and new balance - self.publish_update(HathorEvents.WALLET_INPUT_SPENT, output_spent=spent) - else: - # If we dont have it in the unspent_txs, it must be in the spent_txs - # So we append this spent with the others - if key in self.spent_txs: - output_tx = tx.storage.get_transaction(_input.tx_id) - output = output_tx.outputs[_input.index] - spent = SpentTx(tx.hash, _input.tx_id, _input.index, output.value, tx.timestamp) - self.spent_txs[key].append(spent) - else: + if not script_type_out: self.log.warn('unknown input data') + continue + if script_type_out.address not in self.keys: + continue + # this wallet spent tokens + # remove from unspent_txs + key = (_input.tx_id, _input.index) + old_utxo = self.unspent_txs[token_id].pop(key, None) + if old_utxo is None: + old_utxo = self.maybe_spent_txs[token_id].pop(key, None) + if old_utxo: + # add to spent_txs + spent = SpentTx(tx.hash, _input.tx_id, _input.index, old_utxo.value, tx.timestamp) + self.spent_txs[key].append(spent) + should_update = True + # publish spent output and new balance + self.publish_update(HathorEvents.WALLET_INPUT_SPENT, output_spent=spent) + else: + # If we dont have it in the unspent_txs, it must be in the spent_txs + # So we append this spent with the others + if key in self.spent_txs: + output_tx = tx.storage.get_transaction(_input.tx_id) + output = output_tx.outputs[_input.index] + spent = SpentTx(tx.hash, _input.tx_id, _input.index, output.value, tx.timestamp) + self.spent_txs[key].append(spent) - if updated: + if should_update: # TODO update history file? # XXX should wallet always update it or it will be called externally? 
self.update_balance() @@ -613,31 +615,33 @@ def on_tx_voided(self, tx: Transaction) -> None: for index, tx_output in enumerate(tx.outputs): script_type_out = parse_address_script(tx_output.script) token_id = tx.get_token_uid(tx_output.get_token_index()) - if script_type_out: - if script_type_out.address in self.keys: - # Remove this output from unspent_tx, if still there - key = (tx.hash, index) - utxo = self.unspent_txs[token_id].pop(key, None) - if utxo is None: - utxo = self.maybe_spent_txs[token_id].pop(key, None) - if utxo: - # Output found: update balance - should_update = True - else: - # If it is in spent tx, remove from dict - if key in self.spent_txs: - should_update = True - del self.spent_txs[key] - - # Save in voided_unspent, if it's not there yet - # First try to find it in voided_unspent - voided_utxo = self.voided_unspent.get(key, None) - if not voided_utxo: - # If it's not there, we add it - voided = UnspentTx(tx.hash, index, tx_output.value, tx.timestamp, script_type_out.address, - tx_output.token_data, voided=True, timelock=script_type_out.timelock) - self.voided_unspent[key] = voided - should_update = True + if not script_type_out: + continue + if script_type_out.address not in self.keys: + continue + # Remove this output from unspent_tx, if still there + key = (tx.hash, index) + utxo = self.unspent_txs[token_id].pop(key, None) + if utxo is None: + utxo = self.maybe_spent_txs[token_id].pop(key, None) + if utxo: + # Output found: update balance + should_update = True + else: + # If it is in spent tx, remove from dict + if key in self.spent_txs: + should_update = True + del self.spent_txs[key] + + # Save in voided_unspent, if it's not there yet + # First try to find it in voided_unspent + voided_utxo = self.voided_unspent.get(key, None) + if not voided_utxo: + # If it's not there, we add it + voided = UnspentTx(tx.hash, index, tx_output.value, tx.timestamp, script_type_out.address, + tx_output.token_data, voided=True, 
timelock=script_type_out.timelock) + self.voided_unspent[key] = voided + should_update = True # check inputs for _input in tx.inputs: @@ -645,57 +649,59 @@ def on_tx_voided(self, tx: Transaction) -> None: output_ = output_tx.outputs[_input.index] script_type_out = parse_address_script(output_.script) token_id = output_tx.get_token_uid(output_.get_token_index()) - if script_type_out: - if script_type_out.address in self.keys: - output: Optional[TxOutput] = None - # Try to find in spent tx - key = (_input.tx_id, _input.index) - if key in self.spent_txs: - list_index = -1 - for i, spent in enumerate(self.spent_txs[key]): - if (spent.tx_id == tx.hash and spent.from_index == _input.index - and spent.from_tx_id == _input.tx_id): - list_index = i - break - - if list_index > -1: - # Spent found: remove from list - spent = self.spent_txs[key].pop(list_index) - - if len(self.spent_txs[key]) == 0: - # If this was the last input that spent this output, we recreate the output - output_tx = tx.storage.get_transaction(spent.from_tx_id) - output = output_tx.outputs[spent.from_index] - assert output is not None - - script_type_out = parse_address_script(output.script) - if script_type_out and script_type_out.address in self.keys: - utxo = UnspentTx(_input.tx_id, _input.index, output.value, - output_tx.timestamp, script_type_out.address, - output.token_data, timelock=script_type_out.timelock) - self.unspent_txs[token_id][key] = utxo - - should_update = True - - # Save in voided_spent, if it's not there yet - # First try to find it in voided_spent - voided_stxi_list = self.voided_spent.get(key, []) - list_index = -1 - for i, spent in enumerate(voided_stxi_list): - if (spent.tx_id == tx.hash and spent.from_index == _input.index - and spent.from_tx_id == _input.tx_id): - list_index = i - break - if list_index == -1: - # If it's not there, we add it - if output is None: - output_tx = tx.storage.get_transaction(_input.tx_id) - output = output_tx.outputs[_input.index] - - voided_spent = 
SpentTx(tx.hash, _input.tx_id, _input.index, output.value, tx.timestamp, - voided=True) - self.voided_spent[key].append(voided_spent) - should_update = True + if not script_type_out: + continue + if script_type_out.address not in self.keys: + continue + output: Optional[TxOutput] = None + # Try to find in spent tx + key = (_input.tx_id, _input.index) + if key in self.spent_txs: + list_index = -1 + for i, spent in enumerate(self.spent_txs[key]): + if (spent.tx_id == tx.hash and spent.from_index == _input.index + and spent.from_tx_id == _input.tx_id): + list_index = i + break + + if list_index > -1: + # Spent found: remove from list + spent = self.spent_txs[key].pop(list_index) + + if len(self.spent_txs[key]) == 0: + # If this was the last input that spent this output, we recreate the output + output_tx = tx.storage.get_transaction(spent.from_tx_id) + output = output_tx.outputs[spent.from_index] + assert output is not None + + script_type_out = parse_address_script(output.script) + if script_type_out and script_type_out.address in self.keys: + utxo = UnspentTx(_input.tx_id, _input.index, output.value, + output_tx.timestamp, script_type_out.address, + output.token_data, timelock=script_type_out.timelock) + self.unspent_txs[token_id][key] = utxo + + should_update = True + + # Save in voided_spent, if it's not there yet + # First try to find it in voided_spent + voided_stxi_list = self.voided_spent.get(key, []) + list_index = -1 + for i, spent in enumerate(voided_stxi_list): + if (spent.tx_id == tx.hash and spent.from_index == _input.index + and spent.from_tx_id == _input.tx_id): + list_index = i + break + if list_index == -1: + # If it's not there, we add it + if output is None: + output_tx = tx.storage.get_transaction(_input.tx_id) + output = output_tx.outputs[_input.index] + + voided_spent = SpentTx(tx.hash, _input.tx_id, _input.index, output.value, tx.timestamp, + voided=True) + self.voided_spent[key].append(voided_spent) + should_update = True if should_update: # 
update balance @@ -730,27 +736,29 @@ def on_tx_winner(self, tx: Transaction) -> None: for index, output in enumerate(tx.outputs): script_type_out = parse_address_script(output.script) token_id = tx.get_token_uid(output.get_token_index()) - if script_type_out: - if script_type_out.address in self.keys: - # Find output - key = (tx.hash, index) - utxo = self.unspent_txs[token_id].get(key) - if utxo is None: - utxo = self.maybe_spent_txs[token_id].get(key) - if not utxo: - # Not found in unspent - # Try to find in spent tx - if key not in self.spent_txs or len(self.spent_txs[key]) == 0: - # If it's not in unspet or spent it was deleted, so we create again in the unspent - utxo = UnspentTx(tx.hash, index, output.value, tx.timestamp, script_type_out.address, - output.token_data, timelock=script_type_out.timelock) - self.unspent_txs[token_id][key] = utxo + if not script_type_out: + continue + if script_type_out.address not in self.keys: + continue + # Find output + key = (tx.hash, index) + utxo = self.unspent_txs[token_id].get(key) + if utxo is None: + utxo = self.maybe_spent_txs[token_id].get(key) + if not utxo: + # Not found in unspent + # Try to find in spent tx + if key not in self.spent_txs or len(self.spent_txs[key]) == 0: + # If it's not in unspet or spent it was deleted, so we create again in the unspent + utxo = UnspentTx(tx.hash, index, output.value, tx.timestamp, script_type_out.address, + output.token_data, timelock=script_type_out.timelock) + self.unspent_txs[token_id][key] = utxo - # Remove from voided_unspent, if it's there - voided_utxo = self.voided_unspent.pop(key, None) - if voided_utxo: - # If it's there, we should update - should_update = True + # Remove from voided_unspent, if it's there + voided_utxo = self.voided_unspent.pop(key, None) + if voided_utxo: + # If it's there, we should update + should_update = True # check inputs for _input in tx.inputs: @@ -759,57 +767,59 @@ def on_tx_winner(self, tx: Transaction) -> None: token_id = 
output_tx.get_token_uid(output.get_token_index()) script_type_out = parse_address_script(output.script) - if script_type_out: - if script_type_out.address in self.keys: - key = (_input.tx_id, _input.index) - # Remove from voided_spent, if it's there - # First try to find it in voided_spent - voided_stxi_list = self.voided_spent.get(key, []) - list_index = -1 - for i, spent in enumerate(voided_stxi_list): - if (spent.tx_id == tx.hash and spent.from_index == _input.index - and spent.from_tx_id == _input.tx_id): - list_index = i - break - if list_index > -1: - # If it's there, we remove it - self.voided_spent[key].pop(list_index) - should_update = True - - # Remove from unspent_txs, if it's there - old_utxo = self.unspent_txs[token_id].pop(key, None) - if old_utxo is None: - old_utxo = self.maybe_spent_txs[token_id].pop(key, None) - if old_utxo: - # add to spent_txs - spent = SpentTx(tx.hash, _input.tx_id, _input.index, old_utxo.value, tx.timestamp) - self.spent_txs[(_input.tx_id, _input.index)].append(spent) - should_update = True - continue - - # If we dont have it in the unspent_txs, we check in the spent txs - # Try to find in spent tx - found = False - if key in self.spent_txs: - list_index = -1 - for i, spent in enumerate(self.spent_txs[key]): - if (spent.tx_id == tx.hash and spent.from_index == _input.index - and spent.from_tx_id == _input.tx_id): - list_index = i - break - - if list_index > -1: - found = True - - if not found: - # If spent not found, we recreate it - # Get tx from output to get the value - output_tx = tx.storage.get_transaction(_input.tx_id) - output = output_tx.outputs[_input.index] - - spent = SpentTx(tx.hash, _input.tx_id, _input.index, output.value, tx.timestamp) - self.spent_txs[key].append(spent) - should_update = True + if not script_type_out: + continue + if script_type_out.address not in self.keys: + continue + key = (_input.tx_id, _input.index) + # Remove from voided_spent, if it's there + # First try to find it in voided_spent + 
voided_stxi_list = self.voided_spent.get(key, []) + list_index = -1 + for i, spent in enumerate(voided_stxi_list): + if (spent.tx_id == tx.hash and spent.from_index == _input.index + and spent.from_tx_id == _input.tx_id): + list_index = i + break + if list_index > -1: + # If it's there, we remove it + self.voided_spent[key].pop(list_index) + should_update = True + + # Remove from unspent_txs, if it's there + old_utxo = self.unspent_txs[token_id].pop(key, None) + if old_utxo is None: + old_utxo = self.maybe_spent_txs[token_id].pop(key, None) + if old_utxo: + # add to spent_txs + spent = SpentTx(tx.hash, _input.tx_id, _input.index, old_utxo.value, tx.timestamp) + self.spent_txs[(_input.tx_id, _input.index)].append(spent) + should_update = True + continue + + # If we dont have it in the unspent_txs, we check in the spent txs + # Try to find in spent tx + found = False + if key in self.spent_txs: + list_index = -1 + for i, spent in enumerate(self.spent_txs[key]): + if (spent.tx_id == tx.hash and spent.from_index == _input.index + and spent.from_tx_id == _input.tx_id): + list_index = i + break + + if list_index > -1: + found = True + + if not found: + # If spent not found, we recreate it + # Get tx from output to get the value + output_tx = tx.storage.get_transaction(_input.tx_id) + output = output_tx.outputs[_input.index] + + spent = SpentTx(tx.hash, _input.tx_id, _input.index, output.value, tx.timestamp) + self.spent_txs[key].append(spent) + should_update = True if should_update: # update balance diff --git a/hathor/wallet/hd_wallet.py b/hathor/wallet/hd_wallet.py index defa61e07..b3dcca1bf 100644 --- a/hathor/wallet/hd_wallet.py +++ b/hathor/wallet/hd_wallet.py @@ -51,7 +51,9 @@ def _register_pycoin_networks() -> None: class HDWallet(BaseWallet): - """ Hierarchical Deterministic Wallet based in BIP32 (https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki) + """ Hierarchical Deterministic Wallet based on BIP32. 
+ + See: https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki """ def __init__(self, *, words: Optional[Any] = None, language: str = 'english', passphrase: bytes = b'', diff --git a/hathor/wallet/resources/thin_wallet/send_tokens.py b/hathor/wallet/resources/thin_wallet/send_tokens.py index 8ba353893..219ba8912 100644 --- a/hathor/wallet/resources/thin_wallet/send_tokens.py +++ b/hathor/wallet/resources/thin_wallet/send_tokens.py @@ -202,7 +202,7 @@ def _stratum_deferred_resolve(self, context: _Context) -> None: funds_hash = context.tx.get_funds_hash() context.tx = self.manager.stratum_factory.mined_txs[funds_hash] # Delete it to avoid memory leak - del(self.manager.stratum_factory.mined_txs[funds_hash]) + del self.manager.stratum_factory.mined_txs[funds_hash] deferred = threads.deferToThreadPool(reactor, self.manager.pow_thread_pool, self._stratum_thread_verify, context) diff --git a/hathor/wallet/resources/unlock.py b/hathor/wallet/resources/unlock.py index 82051523f..55e820a51 100644 --- a/hathor/wallet/resources/unlock.py +++ b/hathor/wallet/resources/unlock.py @@ -53,7 +53,7 @@ def render_POST(self, request): # Wallet HD return self.unlock_wallet_hd(post_data) - def unlock_wallet_hd(self, data): + def unlock_wallet_hd(self, data: Dict[str, Any]) -> bytes: words = None if 'words' in data: words = data['words'] @@ -72,7 +72,7 @@ def unlock_wallet_hd(self, data): return json_dumpb(ret) - def unlock_wallet_keypair(self, data): + def unlock_wallet_keypair(self, data: Dict[str, Any]) -> bytes: password = bytes(data['password'], 'utf-8') ret: Dict[str, Any] = {} success = True diff --git a/hathor/websocket/factory.py b/hathor/websocket/factory.py index 32c027aae..7c4b636e9 100644 --- a/hathor/websocket/factory.py +++ b/hathor/websocket/factory.py @@ -13,7 +13,7 @@ # limitations under the License. 
from collections import defaultdict, deque -from typing import Any, DefaultDict, Deque, Dict, Optional, Set +from typing import Any, DefaultDict, Deque, Dict, Optional, Set, Union from autobahn.exception import Disconnected from autobahn.twisted.websocket import WebSocketServerFactory @@ -24,8 +24,8 @@ from hathor.indexes import AddressIndex from hathor.metrics import Metrics from hathor.p2p.rate_limiter import RateLimiter -from hathor.pubsub import HathorEvents -from hathor.util import json_dumpb, json_loadb, reactor +from hathor.pubsub import EventArguments, HathorEvents +from hathor.util import json_dumpb, json_loadb, json_loads, reactor from hathor.websocket.protocol import HathorAdminWebsocketProtocol settings = HathorSettings() @@ -171,7 +171,7 @@ def handle_publish(self, key, args): data['type'] = key.value self.send_or_enqueue(data) - def serialize_message_data(self, event, args): + def serialize_message_data(self, event: HathorEvents, args: EventArguments) -> Dict[str, Any]: """ Receives the event and the args from the pubsub and serializes the data so it can be passed in the websocket """ @@ -295,9 +295,12 @@ def process_deque(self, data_type): data_type=data_type) break - def handle_message(self, connection: HathorAdminWebsocketProtocol, data: bytes) -> None: + def handle_message(self, connection: HathorAdminWebsocketProtocol, data: Union[bytes, str]) -> None: """ General message handler, detects type and deletages to specific handler.""" - message = json_loadb(data) + if isinstance(data, bytes): + message = json_loadb(data) + else: + message = json_loads(data) # we only handle ping messages for now if message['type'] == 'ping': self._handle_ping(connection, message) diff --git a/hathor/websocket/protocol.py b/hathor/websocket/protocol.py index 1b7499f97..993cc2200 100644 --- a/hathor/websocket/protocol.py +++ b/hathor/websocket/protocol.py @@ -12,11 +12,14 @@ # See the License for the specific language governing permissions and # limitations under the 
License. -from typing import Set, Union +from typing import TYPE_CHECKING, Set, Union from autobahn.twisted.websocket import WebSocketServerProtocol from structlog import get_logger +if TYPE_CHECKING: + from hathor.websocket.factory import HathorAdminWebsocketFactory + logger = get_logger() @@ -27,7 +30,7 @@ class HathorAdminWebsocketProtocol(WebSocketServerProtocol): can send the data update to the clients """ - def __init__(self, factory): + def __init__(self, factory: 'HathorAdminWebsocketFactory') -> None: self.log = logger.new() self.factory = factory self.subscribed_to: Set[str] = set() diff --git a/poetry.lock b/poetry.lock index 210b60983..72e4cd6ee 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,10 +1,101 @@ +# This file is automatically @generated by Poetry 1.4.0 and should not be changed by hand. + [[package]] name = "aiohttp" -version = "3.8.1" +version = "3.8.3" description = "Async http client/server framework (asyncio)" category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "aiohttp-3.8.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ba71c9b4dcbb16212f334126cc3d8beb6af377f6703d9dc2d9fb3874fd667ee9"}, + {file = "aiohttp-3.8.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d24b8bb40d5c61ef2d9b6a8f4528c2f17f1c5d2d31fed62ec860f6006142e83e"}, + {file = "aiohttp-3.8.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f88df3a83cf9df566f171adba39d5bd52814ac0b94778d2448652fc77f9eb491"}, + {file = "aiohttp-3.8.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b97decbb3372d4b69e4d4c8117f44632551c692bb1361b356a02b97b69e18a62"}, + {file = "aiohttp-3.8.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:309aa21c1d54b8ef0723181d430347d7452daaff93e8e2363db8e75c72c2fb2d"}, + {file = "aiohttp-3.8.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ad5383a67514e8e76906a06741febd9126fc7c7ff0f599d6fcce3e82b80d026f"}, + {file = 
"aiohttp-3.8.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20acae4f268317bb975671e375493dbdbc67cddb5f6c71eebdb85b34444ac46b"}, + {file = "aiohttp-3.8.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:05a3c31c6d7cd08c149e50dc7aa2568317f5844acd745621983380597f027a18"}, + {file = "aiohttp-3.8.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d6f76310355e9fae637c3162936e9504b4767d5c52ca268331e2756e54fd4ca5"}, + {file = "aiohttp-3.8.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:256deb4b29fe5e47893fa32e1de2d73c3afe7407738bd3c63829874661d4822d"}, + {file = "aiohttp-3.8.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:5c59fcd80b9049b49acd29bd3598cada4afc8d8d69bd4160cd613246912535d7"}, + {file = "aiohttp-3.8.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:059a91e88f2c00fe40aed9031b3606c3f311414f86a90d696dd982e7aec48142"}, + {file = "aiohttp-3.8.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2feebbb6074cdbd1ac276dbd737b40e890a1361b3cc30b74ac2f5e24aab41f7b"}, + {file = "aiohttp-3.8.3-cp310-cp310-win32.whl", hash = "sha256:5bf651afd22d5f0c4be16cf39d0482ea494f5c88f03e75e5fef3a85177fecdeb"}, + {file = "aiohttp-3.8.3-cp310-cp310-win_amd64.whl", hash = "sha256:653acc3880459f82a65e27bd6526e47ddf19e643457d36a2250b85b41a564715"}, + {file = "aiohttp-3.8.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:86fc24e58ecb32aee09f864cb11bb91bc4c1086615001647dbfc4dc8c32f4008"}, + {file = "aiohttp-3.8.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75e14eac916f024305db517e00a9252714fce0abcb10ad327fb6dcdc0d060f1d"}, + {file = "aiohttp-3.8.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d1fde0f44029e02d02d3993ad55ce93ead9bb9b15c6b7ccd580f90bd7e3de476"}, + {file = "aiohttp-3.8.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ab94426ddb1ecc6a0b601d832d5d9d421820989b8caa929114811369673235c"}, + {file = 
"aiohttp-3.8.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:89d2e02167fa95172c017732ed7725bc8523c598757f08d13c5acca308e1a061"}, + {file = "aiohttp-3.8.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:02f9a2c72fc95d59b881cf38a4b2be9381b9527f9d328771e90f72ac76f31ad8"}, + {file = "aiohttp-3.8.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c7149272fb5834fc186328e2c1fa01dda3e1fa940ce18fded6d412e8f2cf76d"}, + {file = "aiohttp-3.8.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:512bd5ab136b8dc0ffe3fdf2dfb0c4b4f49c8577f6cae55dca862cd37a4564e2"}, + {file = "aiohttp-3.8.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7018ecc5fe97027214556afbc7c502fbd718d0740e87eb1217b17efd05b3d276"}, + {file = "aiohttp-3.8.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:88c70ed9da9963d5496d38320160e8eb7e5f1886f9290475a881db12f351ab5d"}, + {file = "aiohttp-3.8.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:da22885266bbfb3f78218dc40205fed2671909fbd0720aedba39b4515c038091"}, + {file = "aiohttp-3.8.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:e65bc19919c910127c06759a63747ebe14f386cda573d95bcc62b427ca1afc73"}, + {file = "aiohttp-3.8.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:08c78317e950e0762c2983f4dd58dc5e6c9ff75c8a0efeae299d363d439c8e34"}, + {file = "aiohttp-3.8.3-cp311-cp311-win32.whl", hash = "sha256:45d88b016c849d74ebc6f2b6e8bc17cabf26e7e40c0661ddd8fae4c00f015697"}, + {file = "aiohttp-3.8.3-cp311-cp311-win_amd64.whl", hash = "sha256:96372fc29471646b9b106ee918c8eeb4cca423fcbf9a34daa1b93767a88a2290"}, + {file = "aiohttp-3.8.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c971bf3786b5fad82ce5ad570dc6ee420f5b12527157929e830f51c55dc8af77"}, + {file = "aiohttp-3.8.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ff25f48fc8e623d95eca0670b8cc1469a83783c924a602e0fbd47363bb54aaca"}, + {file = "aiohttp-3.8.3-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e381581b37db1db7597b62a2e6b8b57c3deec95d93b6d6407c5b61ddc98aca6d"}, + {file = "aiohttp-3.8.3-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:db19d60d846283ee275d0416e2a23493f4e6b6028825b51290ac05afc87a6f97"}, + {file = "aiohttp-3.8.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:25892c92bee6d9449ffac82c2fe257f3a6f297792cdb18ad784737d61e7a9a85"}, + {file = "aiohttp-3.8.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:398701865e7a9565d49189f6c90868efaca21be65c725fc87fc305906be915da"}, + {file = "aiohttp-3.8.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:4a4fbc769ea9b6bd97f4ad0b430a6807f92f0e5eb020f1e42ece59f3ecfc4585"}, + {file = "aiohttp-3.8.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:b29bfd650ed8e148f9c515474a6ef0ba1090b7a8faeee26b74a8ff3b33617502"}, + {file = "aiohttp-3.8.3-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:1e56b9cafcd6531bab5d9b2e890bb4937f4165109fe98e2b98ef0dcfcb06ee9d"}, + {file = "aiohttp-3.8.3-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ec40170327d4a404b0d91855d41bfe1fe4b699222b2b93e3d833a27330a87a6d"}, + {file = "aiohttp-3.8.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:2df5f139233060578d8c2c975128fb231a89ca0a462b35d4b5fcf7c501ebdbe1"}, + {file = "aiohttp-3.8.3-cp36-cp36m-win32.whl", hash = "sha256:f973157ffeab5459eefe7b97a804987876dd0a55570b8fa56b4e1954bf11329b"}, + {file = "aiohttp-3.8.3-cp36-cp36m-win_amd64.whl", hash = "sha256:437399385f2abcd634865705bdc180c8314124b98299d54fe1d4c8990f2f9494"}, + {file = "aiohttp-3.8.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:09e28f572b21642128ef31f4e8372adb6888846f32fecb288c8b0457597ba61a"}, + {file = 
"aiohttp-3.8.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f3553510abdbec67c043ca85727396ceed1272eef029b050677046d3387be8d"}, + {file = "aiohttp-3.8.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e168a7560b7c61342ae0412997b069753f27ac4862ec7867eff74f0fe4ea2ad9"}, + {file = "aiohttp-3.8.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:db4c979b0b3e0fa7e9e69ecd11b2b3174c6963cebadeecfb7ad24532ffcdd11a"}, + {file = "aiohttp-3.8.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e164e0a98e92d06da343d17d4e9c4da4654f4a4588a20d6c73548a29f176abe2"}, + {file = "aiohttp-3.8.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8a78079d9a39ca9ca99a8b0ac2fdc0c4d25fc80c8a8a82e5c8211509c523363"}, + {file = "aiohttp-3.8.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:21b30885a63c3f4ff5b77a5d6caf008b037cb521a5f33eab445dc566f6d092cc"}, + {file = "aiohttp-3.8.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4b0f30372cef3fdc262f33d06e7b411cd59058ce9174ef159ad938c4a34a89da"}, + {file = "aiohttp-3.8.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:8135fa153a20d82ffb64f70a1b5c2738684afa197839b34cc3e3c72fa88d302c"}, + {file = "aiohttp-3.8.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:ad61a9639792fd790523ba072c0555cd6be5a0baf03a49a5dd8cfcf20d56df48"}, + {file = "aiohttp-3.8.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:978b046ca728073070e9abc074b6299ebf3501e8dee5e26efacb13cec2b2dea0"}, + {file = "aiohttp-3.8.3-cp37-cp37m-win32.whl", hash = "sha256:0d2c6d8c6872df4a6ec37d2ede71eff62395b9e337b4e18efd2177de883a5033"}, + {file = "aiohttp-3.8.3-cp37-cp37m-win_amd64.whl", hash = "sha256:21d69797eb951f155026651f7e9362877334508d39c2fc37bd04ff55b2007091"}, + {file = "aiohttp-3.8.3-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:2ca9af5f8f5812d475c5259393f52d712f6d5f0d7fdad9acdb1107dd9e3cb7eb"}, + {file = "aiohttp-3.8.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d90043c1882067f1bd26196d5d2db9aa6d268def3293ed5fb317e13c9413ea4"}, + {file = "aiohttp-3.8.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d737fc67b9a970f3234754974531dc9afeea11c70791dcb7db53b0cf81b79784"}, + {file = "aiohttp-3.8.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ebf909ea0a3fc9596e40d55d8000702a85e27fd578ff41a5500f68f20fd32e6c"}, + {file = "aiohttp-3.8.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5835f258ca9f7c455493a57ee707b76d2d9634d84d5d7f62e77be984ea80b849"}, + {file = "aiohttp-3.8.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da37dcfbf4b7f45d80ee386a5f81122501ec75672f475da34784196690762f4b"}, + {file = "aiohttp-3.8.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87f44875f2804bc0511a69ce44a9595d5944837a62caecc8490bbdb0e18b1342"}, + {file = "aiohttp-3.8.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:527b3b87b24844ea7865284aabfab08eb0faf599b385b03c2aa91fc6edd6e4b6"}, + {file = "aiohttp-3.8.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d5ba88df9aa5e2f806650fcbeedbe4f6e8736e92fc0e73b0400538fd25a4dd96"}, + {file = "aiohttp-3.8.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e7b8813be97cab8cb52b1375f41f8e6804f6507fe4660152e8ca5c48f0436017"}, + {file = "aiohttp-3.8.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:2dea10edfa1a54098703cb7acaa665c07b4e7568472a47f4e64e6319d3821ccf"}, + {file = "aiohttp-3.8.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:713d22cd9643ba9025d33c4af43943c7a1eb8547729228de18d3e02e278472b6"}, + {file = "aiohttp-3.8.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2d252771fc85e0cf8da0b823157962d70639e63cb9b578b1dec9868dd1f4f937"}, + {file = "aiohttp-3.8.3-cp38-cp38-win32.whl", 
hash = "sha256:66bd5f950344fb2b3dbdd421aaa4e84f4411a1a13fca3aeb2bcbe667f80c9f76"}, + {file = "aiohttp-3.8.3-cp38-cp38-win_amd64.whl", hash = "sha256:84b14f36e85295fe69c6b9789b51a0903b774046d5f7df538176516c3e422446"}, + {file = "aiohttp-3.8.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16c121ba0b1ec2b44b73e3a8a171c4f999b33929cd2397124a8c7fcfc8cd9e06"}, + {file = "aiohttp-3.8.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8d6aaa4e7155afaf994d7924eb290abbe81a6905b303d8cb61310a2aba1c68ba"}, + {file = "aiohttp-3.8.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:43046a319664a04b146f81b40e1545d4c8ac7b7dd04c47e40bf09f65f2437346"}, + {file = "aiohttp-3.8.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:599418aaaf88a6d02a8c515e656f6faf3d10618d3dd95866eb4436520096c84b"}, + {file = "aiohttp-3.8.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92a2964319d359f494f16011e23434f6f8ef0434acd3cf154a6b7bec511e2fb7"}, + {file = "aiohttp-3.8.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:73a4131962e6d91109bca6536416aa067cf6c4efb871975df734f8d2fd821b37"}, + {file = "aiohttp-3.8.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:598adde339d2cf7d67beaccda3f2ce7c57b3b412702f29c946708f69cf8222aa"}, + {file = "aiohttp-3.8.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:75880ed07be39beff1881d81e4a907cafb802f306efd6d2d15f2b3c69935f6fb"}, + {file = "aiohttp-3.8.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a0239da9fbafd9ff82fd67c16704a7d1bccf0d107a300e790587ad05547681c8"}, + {file = "aiohttp-3.8.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4e3a23ec214e95c9fe85a58470b660efe6534b83e6cbe38b3ed52b053d7cb6ad"}, + {file = "aiohttp-3.8.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:47841407cc89a4b80b0c52276f3cc8138bbbfba4b179ee3acbd7d77ae33f7ac4"}, + {file = 
"aiohttp-3.8.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:54d107c89a3ebcd13228278d68f1436d3f33f2dd2af5415e3feaeb1156e1a62c"}, + {file = "aiohttp-3.8.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c37c5cce780349d4d51739ae682dec63573847a2a8dcb44381b174c3d9c8d403"}, + {file = "aiohttp-3.8.3-cp39-cp39-win32.whl", hash = "sha256:f178d2aadf0166be4df834c4953da2d7eef24719e8aec9a65289483eeea9d618"}, + {file = "aiohttp-3.8.3-cp39-cp39-win_amd64.whl", hash = "sha256:88e5be56c231981428f4f506c68b6a46fa25c4123a2e86d156c58a8369d31ab7"}, + {file = "aiohttp-3.8.3.tar.gz", hash = "sha256:3828fb41b7203176b82fe5d699e0d845435f2374750a44b480ea6b930f6be269"}, +] [package.dependencies] aiosignal = ">=1.1.2" @@ -20,11 +111,15 @@ speedups = ["Brotli", "aiodns", "cchardet"] [[package]] name = "aiosignal" -version = "1.2.0" +version = "1.3.1" description = "aiosignal: a list of registered asynchronous callbacks" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" +files = [ + {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, + {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, +] [package.dependencies] frozenlist = ">=1.1.0" @@ -36,14 +131,22 @@ description = "Disable App Nap on macOS >= 10.9" category = "main" optional = false python-versions = "*" +files = [ + {file = "appnope-0.1.3-py2.py3-none-any.whl", hash = "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"}, + {file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, +] [[package]] name = "asttokens" -version = "2.0.5" +version = "2.2.1" description = "Annotate AST trees with source code positions" category = "main" optional = false python-versions = "*" +files = [ + {file = "asttokens-2.2.1-py2.py3-none-any.whl", hash = 
"sha256:6b0ac9e93fb0335014d382b8fa9b3afa7df546984258005da0b9e7095b3deb1c"}, + {file = "asttokens-2.2.1.tar.gz", hash = "sha256:4622110b2a6f30b77e1473affaa97e711bc2f07d3f10848420ff1898edbe94f3"}, +] [package.dependencies] six = "*" @@ -58,36 +161,39 @@ description = "Timeout context manager for asyncio programs" category = "main" optional = false python-versions = ">=3.6" - -[[package]] -name = "atomicwrites" -version = "1.4.0" -description = "Atomic file writes." -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, + {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, +] [[package]] name = "attrs" -version = "21.4.0" +version = "22.1.0" description = "Classes Without Boilerplate" category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.5" +files = [ + {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, + {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, +] [package.extras] -dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "sphinx", "sphinx-notfound-page", "zope.interface"] +dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] -tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", 
"zope.interface"] -tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six"] +tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] +tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] [[package]] name = "autobahn" -version = "22.4.2" +version = "22.7.1" description = "WebSocket client & server library, WAMP real-time framework" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "autobahn-22.7.1.tar.gz", hash = "sha256:8b462ea2e6aad6b4dc0ed45fb800b6cbfeb0325e7fe6983907f122f2be4a1fe9"}, +] [package.dependencies] cryptography = ">=3.4.6" @@ -96,25 +202,28 @@ setuptools = "*" txaio = ">=21.2.1" [package.extras] -accelerate = ["wsaccel (>=0.6.3)"] -all = ["PyGObject (>=3.40.0)", "argon2_cffi (>=20.1.0)", "attrs (>=20.3.0)", "base58 (>=2.1.0)", "cbor2 (>=5.2.0)", "cffi (>=1.14.5)", "click (>=8.1.2)", "ecdsa (>=0.16.1)", "eth-abi (>=2.1.1)", "flatbuffers (>=1.12)", "hkdf (>=0.0.3)", "jinja2 (>=2.11.3)", "mnemonic (>=0.19)", "msgpack (>=1.0.2)", "passlib (>=1.7.4)", "py-ecc (>=5.1.0)", "py-eth-sig-utils (>=0.4.0)", "py-multihash (>=2.0.1)", "py-ubjson (>=0.16.1)", "pynacl (>=1.4.0)", "pyopenssl (>=20.0.1)", "pyqrcode (>=1.2.1)", "python-snappy (>=0.6.0)", "pytrie (>=0.4.0)", "rlp (>=2.0.1)", "service_identity (>=18.1.0)", "spake2 (>=0.8)", "twisted (>=20.3.0)", "ujson (>=4.0.2)", "web3 (>=5.16.0)", "wsaccel (>=0.6.3)", "xbr (>=21.2.1)", "yapf (==0.29.0)", "zlmdb (>=21.2.1)", "zope.interface (>=5.2.0)"] +all = ["PyGObject (>=3.40.0)", "argon2_cffi (>=20.1.0)", "attrs (>=20.3.0)", "base58 (>=2.1.0)", "cbor2 (>=5.2.0)", "cffi (>=1.14.5)", "click (>=8.1.2)", "ecdsa (>=0.16.1)", "eth-abi (>=2.1.1)", "flatbuffers (>=1.12)", "hkdf (>=0.0.3)", "jinja2 
(>=2.11.3)", "mnemonic (>=0.19)", "msgpack (>=1.0.2)", "passlib (>=1.7.4)", "py-ecc (>=5.1.0)", "py-eth-sig-utils (>=0.4.0)", "py-multihash (>=2.0.1)", "py-ubjson (>=0.16.1)", "pynacl (>=1.4.0)", "pyopenssl (>=20.0.1)", "python-snappy (>=0.6.0)", "pytrie (>=0.4.0)", "qrcode (>=7.3.1)", "rlp (>=2.0.1)", "service_identity (>=18.1.0)", "spake2 (>=0.8)", "twisted (>=20.3.0)", "ujson (>=4.0.2)", "web3 (>=5.29.0)", "xbr (>=21.2.1)", "yapf (==0.29.0)", "zlmdb (>=21.2.1)", "zope.interface (>=5.2.0)"] compress = ["python-snappy (>=0.6.0)"] -dev = ["awscli", "backports.tempfile (>=1.0)", "bumpversion (>=0.5.3)", "codecov (>=2.0.15)", "flake8 (>=3.5.0)", "humanize (>=0.5.1)", "mypy (>=0.610)", "passlib", "pep8-naming (>=0.3.3)", "pip (>=9.0.1)", "pyenchant (>=1.6.6)", "pyflakes (>=1.0.0)", "pyinstaller (>=4.2)", "pylint (>=1.9.2)", "pytest (>=3.4.2)", "pytest-aiohttp", "pytest-asyncio (>=0.14.0)", "pytest-runner (>=2.11.1)", "pyyaml (>=4.2b4)", "qualname", "sphinx (>=1.7.1)", "sphinx-autoapi (>=1.7.0)", "sphinx_rtd_theme (>=0.1.9)", "sphinxcontrib-images (>=0.9.1)", "tox (>=2.9.1)", "tox-gh-actions (>=2.2.0)", "twine (>=3.3.0)", "twisted (>=18.7.0)", "txaio (>=20.4.1)", "watchdog (>=0.8.3)", "wheel (>=0.36.2)", "yapf (==0.29.0)"] -encryption = ["pynacl (>=1.4.0)", "pyopenssl (>=20.0.1)", "pyqrcode (>=1.2.1)", "pytrie (>=0.4.0)", "service_identity (>=18.1.0)"] +dev = ["awscli", "backports.tempfile (>=1.0)", "bumpversion (>=0.5.3)", "codecov (>=2.0.15)", "flake8 (<5)", "humanize (>=0.5.1)", "mypy (>=0.610)", "passlib", "pep8-naming (>=0.3.3)", "pip (>=9.0.1)", "pyenchant (>=1.6.6)", "pyflakes (>=1.0.0)", "pyinstaller (>=4.2)", "pylint (>=1.9.2)", "pytest (>=3.4.2)", "pytest-aiohttp", "pytest-asyncio (>=0.14.0)", "pytest-runner (>=2.11.1)", "pyyaml (>=4.2b4)", "qualname", "sphinx (>=1.7.1)", "sphinx-autoapi (>=1.7.0)", "sphinx_rtd_theme (>=0.1.9)", "sphinxcontrib-images (>=0.9.1)", "tox (>=2.9.1)", "tox-gh-actions (>=2.2.0)", "twine (>=3.3.0)", "twisted (>=18.7.0)", "txaio 
(>=20.4.1)", "watchdog (>=0.8.3)", "wheel (>=0.36.2)", "yapf (==0.29.0)"] +encryption = ["pynacl (>=1.4.0)", "pyopenssl (>=20.0.1)", "pytrie (>=0.4.0)", "qrcode (>=7.3.1)", "service_identity (>=18.1.0)"] nvx = ["cffi (>=1.14.5)"] scram = ["argon2_cffi (>=20.1.0)", "cffi (>=1.14.5)", "passlib (>=1.7.4)"] serialization = ["cbor2 (>=5.2.0)", "flatbuffers (>=1.12)", "msgpack (>=1.0.2)", "py-ubjson (>=0.16.1)", "ujson (>=4.0.2)"] twisted = ["attrs (>=20.3.0)", "twisted (>=20.3.0)", "zope.interface (>=5.2.0)"] ui = ["PyGObject (>=3.40.0)"] -xbr = ["base58 (>=2.1.0)", "cbor2 (>=5.2.0)", "click (>=8.1.2)", "ecdsa (>=0.16.1)", "eth-abi (>=2.1.1)", "hkdf (>=0.0.3)", "jinja2 (>=2.11.3)", "mnemonic (>=0.19)", "py-ecc (>=5.1.0)", "py-eth-sig-utils (>=0.4.0)", "py-multihash (>=2.0.1)", "rlp (>=2.0.1)", "spake2 (>=0.8)", "twisted (>=20.3.0)", "web3 (>=5.16.0)", "xbr (>=21.2.1)", "yapf (==0.29.0)", "zlmdb (>=21.2.1)"] +xbr = ["base58 (>=2.1.0)", "cbor2 (>=5.2.0)", "click (>=8.1.2)", "ecdsa (>=0.16.1)", "eth-abi (>=2.1.1)", "hkdf (>=0.0.3)", "jinja2 (>=2.11.3)", "mnemonic (>=0.19)", "py-ecc (>=5.1.0)", "py-eth-sig-utils (>=0.4.0)", "py-multihash (>=2.0.1)", "rlp (>=2.0.1)", "spake2 (>=0.8)", "twisted (>=20.3.0)", "web3 (>=5.29.0)", "xbr (>=21.2.1)", "yapf (==0.29.0)", "zlmdb (>=21.2.1)"] [[package]] name = "automat" -version = "20.2.0" +version = "22.10.0" description = "Self-service finite-state machines for the programmer on the go." 
category = "main" optional = false python-versions = "*" +files = [ + {file = "Automat-22.10.0-py2.py3-none-any.whl", hash = "sha256:c3164f8742b9dc440f3682482d32aaff7bb53f71740dd018533f9de286b64180"}, + {file = "Automat-22.10.0.tar.gz", hash = "sha256:e56beb84edad19dcc11d30e8d9b895f75deeb5ef5e96b84a467066b3b84bb04e"}, +] [package.dependencies] attrs = ">=19.2.0" @@ -130,6 +239,10 @@ description = "Specifications for callback functions passed in to an API" category = "main" optional = false python-versions = "*" +files = [ + {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, + {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, +] [[package]] name = "base58" @@ -138,47 +251,129 @@ description = "Base58 and Base58Check implementation." category = "main" optional = false python-versions = ">=3.5" +files = [ + {file = "base58-2.1.1-py3-none-any.whl", hash = "sha256:11a36f4d3ce51dfc1043f3218591ac4eb1ceb172919cebe05b52a5bcc8d245c2"}, + {file = "base58-2.1.1.tar.gz", hash = "sha256:c5d0cb3f5b6e81e8e35da5754388ddcc6d0d14b6c6a132cb93d69ed580a7278c"}, +] [package.extras] tests = ["PyHamcrest (>=2.0.2)", "mypy", "pytest (>=4.6)", "pytest-benchmark", "pytest-cov", "pytest-flake8"] [[package]] name = "certifi" -version = "2021.10.8" +version = "2022.12.7" description = "Python package for providing Mozilla's CA Bundle." category = "main" optional = false -python-versions = "*" +python-versions = ">=3.6" +files = [ + {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, + {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, +] [[package]] name = "cffi" -version = "1.15.0" +version = "1.15.1" description = "Foreign Function Interface for Python calling C code." 
category = "main" optional = false python-versions = "*" +files = [ + {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, + {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, + {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, + {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, + {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, + {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, + {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, + {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, + {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, + {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, + {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, + {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, + {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, + {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, + {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, + {file = 
"cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, + {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, + {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, + {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, +] [package.dependencies] pycparser = "*" [[package]] name = 
"charset-normalizer" -version = "2.0.12" +version = "2.1.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." category = "main" optional = false -python-versions = ">=3.5.0" +python-versions = ">=3.6.0" +files = [ + {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, + {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, +] [package.extras] unicode-backport = ["unicodedata2"] [[package]] name = "colorama" -version = "0.4.4" +version = "0.4.6" description = "Cross-platform colored terminal text." category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] [[package]] name = "configargparse" @@ -187,6 +382,10 @@ description = "A drop-in replacement for argparse that allows options to also be category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "ConfigArgParse-1.5.3-py3-none-any.whl", hash = "sha256:18f6535a2db9f6e02bd5626cc7455eac3e96b9ab3d969d366f9aafd5c5c00fe7"}, + {file = "ConfigArgParse-1.5.3.tar.gz", hash = "sha256:1b0b3cbf664ab59dada57123c81eff3d9737e0d11d8cf79e3d6eb10823f1739f"}, +] [package.extras] test = ["PyYAML", "mock", "pytest"] @@ -199,14 +398,70 @@ description = "Symbolic constants in Python" category = "main" optional = false python-versions = "*" +files = [ + {file = "constantly-15.1.0-py2.py3-none-any.whl", hash = 
"sha256:dd2fa9d6b1a51a83f0d7dd76293d734046aa176e384bf6e33b7e44880eb37c5d"}, + {file = "constantly-15.1.0.tar.gz", hash = "sha256:586372eb92059873e29eba4f9dec8381541b4d3834660707faf8ba59146dfc35"}, +] [[package]] name = "coverage" -version = "6.4.4" +version = "6.5.0" description = "Code coverage measurement for Python" category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, + {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"}, + {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"}, + {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"}, + {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"}, + {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"}, + {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"}, + {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"}, + {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"}, + {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"}, + {file = 
"coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"}, + {file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"}, + {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"}, + {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"}, + {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"}, + {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"}, + {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"}, + {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"}, + {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"}, + {file = "coverage-6.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d"}, + {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3"}, + {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef"}, + {file = 
"coverage-6.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79"}, + {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d"}, + {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c"}, + {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f"}, + {file = "coverage-6.5.0-cp37-cp37m-win32.whl", hash = "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b"}, + {file = "coverage-6.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2"}, + {file = "coverage-6.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c"}, + {file = "coverage-6.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba"}, + {file = "coverage-6.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e"}, + {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398"}, + {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b"}, + {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b"}, + {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f"}, + {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e"}, + {file = "coverage-6.5.0-cp38-cp38-win32.whl", hash = "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d"}, + {file = "coverage-6.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6"}, + {file = "coverage-6.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745"}, + {file = "coverage-6.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc"}, + {file = "coverage-6.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe"}, + {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf"}, + {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5"}, + {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62"}, + {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518"}, + {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f"}, + {file = "coverage-6.5.0-cp39-cp39-win32.whl", hash = "sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72"}, + {file = "coverage-6.5.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987"}, + {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"}, + {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"}, +] [package.dependencies] tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} @@ -216,11 +471,39 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "38.0.3" +version = "38.0.4" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "cryptography-38.0.4-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:2fa36a7b2cc0998a3a4d5af26ccb6273f3df133d61da2ba13b3286261e7efb70"}, + {file = "cryptography-38.0.4-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:1f13ddda26a04c06eb57119caf27a524ccae20533729f4b1e4a69b54e07035eb"}, + {file = "cryptography-38.0.4-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:2ec2a8714dd005949d4019195d72abed84198d877112abb5a27740e217e0ea8d"}, + {file = "cryptography-38.0.4-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50a1494ed0c3f5b4d07650a68cd6ca62efe8b596ce743a5c94403e6f11bf06c1"}, + {file = "cryptography-38.0.4-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a10498349d4c8eab7357a8f9aa3463791292845b79597ad1b98a543686fb1ec8"}, + {file = "cryptography-38.0.4-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:10652dd7282de17990b88679cb82f832752c4e8237f0c714be518044269415db"}, + {file = "cryptography-38.0.4-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:bfe6472507986613dc6cc00b3d492b2f7564b02b3b3682d25ca7f40fa3fd321b"}, + {file = "cryptography-38.0.4-cp36-abi3-manylinux_2_28_x86_64.whl", 
hash = "sha256:ce127dd0a6a0811c251a6cddd014d292728484e530d80e872ad9806cfb1c5b3c"}, + {file = "cryptography-38.0.4-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:53049f3379ef05182864d13bb9686657659407148f901f3f1eee57a733fb4b00"}, + {file = "cryptography-38.0.4-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:8a4b2bdb68a447fadebfd7d24855758fe2d6fecc7fed0b78d190b1af39a8e3b0"}, + {file = "cryptography-38.0.4-cp36-abi3-win32.whl", hash = "sha256:1d7e632804a248103b60b16fb145e8df0bc60eed790ece0d12efe8cd3f3e7744"}, + {file = "cryptography-38.0.4-cp36-abi3-win_amd64.whl", hash = "sha256:8e45653fb97eb2f20b8c96f9cd2b3a0654d742b47d638cf2897afbd97f80fa6d"}, + {file = "cryptography-38.0.4-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca57eb3ddaccd1112c18fc80abe41db443cc2e9dcb1917078e02dfa010a4f353"}, + {file = "cryptography-38.0.4-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:c9e0d79ee4c56d841bd4ac6e7697c8ff3c8d6da67379057f29e66acffcd1e9a7"}, + {file = "cryptography-38.0.4-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:0e70da4bdff7601b0ef48e6348339e490ebfb0cbe638e083c9c41fb49f00c8bd"}, + {file = "cryptography-38.0.4-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:998cd19189d8a747b226d24c0207fdaa1e6658a1d3f2494541cb9dfbf7dcb6d2"}, + {file = "cryptography-38.0.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67461b5ebca2e4c2ab991733f8ab637a7265bb582f07c7c88914b5afb88cb95b"}, + {file = "cryptography-38.0.4-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:4eb85075437f0b1fd8cd66c688469a0c4119e0ba855e3fef86691971b887caf6"}, + {file = "cryptography-38.0.4-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3178d46f363d4549b9a76264f41c6948752183b3f587666aff0555ac50fd7876"}, + {file = "cryptography-38.0.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:6391e59ebe7c62d9902c24a4d8bcbc79a68e7c4ab65863536127c8a9cd94043b"}, + {file = 
"cryptography-38.0.4-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:78e47e28ddc4ace41dd38c42e6feecfdadf9c3be2af389abbfeef1ff06822285"}, + {file = "cryptography-38.0.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fb481682873035600b5502f0015b664abc26466153fab5c6bc92c1ea69d478b"}, + {file = "cryptography-38.0.4-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:4367da5705922cf7070462e964f66e4ac24162e22ab0a2e9d31f1b270dd78083"}, + {file = "cryptography-38.0.4-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b4cad0cea995af760f82820ab4ca54e5471fc782f70a007f31531957f43e9dee"}, + {file = "cryptography-38.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:80ca53981ceeb3241998443c4964a387771588c4e4a5d92735a493af868294f9"}, + {file = "cryptography-38.0.4.tar.gz", hash = "sha256:175c1a818b87c9ac80bb7377f5520b7f31b3ef2a0004e2420319beadedb67290"}, +] [package.dependencies] cffi = ">=1.12" @@ -233,14 +516,6 @@ sdist = ["setuptools-rust (>=0.11.4)"] ssh = ["bcrypt (>=3.1.5)"] test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist", "pytz"] -[[package]] -name = "cython" -version = "0.29.28" -description = "The Cython compiler for writing C extensions for the Python language." 
-category = "main" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" - [[package]] name = "decorator" version = "5.1.1" @@ -248,27 +523,72 @@ description = "Decorators for Humans" category = "main" optional = false python-versions = ">=3.5" +files = [ + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.0.4" +description = "Backport of PEP 654 (exception groups)" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.0.4-py3-none-any.whl", hash = "sha256:542adf9dea4055530d6e1279602fa5cb11dab2395fa650b8674eaec35fc4a828"}, + {file = "exceptiongroup-1.0.4.tar.gz", hash = "sha256:bd14967b79cd9bdb54d97323216f8fdf533e278df937aa2a90089e7d6e06e5ec"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "execnet" +version = "1.9.0" +description = "execnet: rapid multi-Python deployment" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "execnet-1.9.0-py2.py3-none-any.whl", hash = "sha256:a295f7cc774947aac58dde7fdc85f4aa00c42adf5d8f5468fc630c1acf30a142"}, + {file = "execnet-1.9.0.tar.gz", hash = "sha256:8f694f3ba9cc92cab508b152dcfe322153975c29bda272e2fd7f3f00f36e47c5"}, +] + +[package.extras] +testing = ["pre-commit"] [[package]] name = "executing" -version = "0.9.1" +version = "1.2.0" description = "Get the currently executing AST node of a frame, and other information" category = "main" optional = false python-versions = "*" +files = [ + {file = "executing-1.2.0-py2.py3-none-any.whl", hash = "sha256:0314a69e37426e3608aada02473b4161d4caf5a4b244d1d0c48072b8fee7bacc"}, + {file = "executing-1.2.0.tar.gz", hash = 
"sha256:19da64c18d2d851112f09c287f8d3dbbdf725ab0e569077efb6cdcbd3497c107"}, +] + +[package.extras] +tests = ["asttokens", "littleutils", "pytest", "rich"] [[package]] name = "flake8" -version = "4.0.1" +version = "6.0.0" description = "the modular source code checker: pep8 pyflakes and co" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8.1" +files = [ + {file = "flake8-6.0.0-py2.py3-none-any.whl", hash = "sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7"}, + {file = "flake8-6.0.0.tar.gz", hash = "sha256:c61007e76655af75e6785a931f452915b371dc48f56efd765247c8fe68f2b181"}, +] [package.dependencies] -mccabe = ">=0.6.0,<0.7.0" -pycodestyle = ">=2.8.0,<2.9.0" -pyflakes = ">=2.4.0,<2.5.0" +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.10.0,<2.11.0" +pyflakes = ">=3.0.0,<3.1.0" [[package]] name = "flaky" @@ -277,39 +597,128 @@ description = "Plugin for nose or pytest that automatically reruns flaky tests." category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "flaky-3.7.0-py2.py3-none-any.whl", hash = "sha256:d6eda73cab5ae7364504b7c44670f70abed9e75f77dd116352f662817592ec9c"}, + {file = "flaky-3.7.0.tar.gz", hash = "sha256:3ad100780721a1911f57a165809b7ea265a7863305acb66708220820caf8aa0d"}, +] [[package]] name = "frozenlist" -version = "1.3.0" +version = "1.3.3" description = "A list-like structure which implements collections.abc.MutableSequence" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff8bf625fe85e119553b5383ba0fb6aa3d0ec2ae980295aaefa552374926b3f4"}, + {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dfbac4c2dfcc082fcf8d942d1e49b6aa0766c19d3358bd86e2000bf0fa4a9cf0"}, + {file = "frozenlist-1.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:b1c63e8d377d039ac769cd0926558bb7068a1f7abb0f003e3717ee003ad85530"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fdfc24dcfce5b48109867c13b4cb15e4660e7bd7661741a391f821f23dfdca7"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2c926450857408e42f0bbc295e84395722ce74bae69a3b2aa2a65fe22cb14b99"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1841e200fdafc3d51f974d9d377c079a0694a8f06de2e67b48150328d66d5483"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f470c92737afa7d4c3aacc001e335062d582053d4dbe73cda126f2d7031068dd"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:783263a4eaad7c49983fe4b2e7b53fa9770c136c270d2d4bbb6d2192bf4d9caf"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:924620eef691990dfb56dc4709f280f40baee568c794b5c1885800c3ecc69816"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ae4dc05c465a08a866b7a1baf360747078b362e6a6dbeb0c57f234db0ef88ae0"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:bed331fe18f58d844d39ceb398b77d6ac0b010d571cba8267c2e7165806b00ce"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:02c9ac843e3390826a265e331105efeab489ffaf4dd86384595ee8ce6d35ae7f"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9545a33965d0d377b0bc823dcabf26980e77f1b6a7caa368a365a9497fb09420"}, + {file = "frozenlist-1.3.3-cp310-cp310-win32.whl", hash = "sha256:d5cd3ab21acbdb414bb6c31958d7b06b85eeb40f66463c264a9b343a4e238642"}, + {file = "frozenlist-1.3.3-cp310-cp310-win_amd64.whl", hash = 
"sha256:b756072364347cb6aa5b60f9bc18e94b2f79632de3b0190253ad770c5df17db1"}, + {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b4395e2f8d83fbe0c627b2b696acce67868793d7d9750e90e39592b3626691b7"}, + {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14143ae966a6229350021384870458e4777d1eae4c28d1a7aa47f24d030e6678"}, + {file = "frozenlist-1.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5d8860749e813a6f65bad8285a0520607c9500caa23fea6ee407e63debcdbef6"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23d16d9f477bb55b6154654e0e74557040575d9d19fe78a161bd33d7d76808e8"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb82dbba47a8318e75f679690190c10a5e1f447fbf9df41cbc4c3afd726d88cb"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9309869032abb23d196cb4e4db574232abe8b8be1339026f489eeb34a4acfd91"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a97b4fe50b5890d36300820abd305694cb865ddb7885049587a5678215782a6b"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c188512b43542b1e91cadc3c6c915a82a5eb95929134faf7fd109f14f9892ce4"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:303e04d422e9b911a09ad499b0368dc551e8c3cd15293c99160c7f1f07b59a48"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0771aed7f596c7d73444c847a1c16288937ef988dc04fb9f7be4b2aa91db609d"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:66080ec69883597e4d026f2f71a231a1ee9887835902dbe6b6467d5a89216cf6"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = 
"sha256:41fe21dc74ad3a779c3d73a2786bdf622ea81234bdd4faf90b8b03cad0c2c0b4"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f20380df709d91525e4bee04746ba612a4df0972c1b8f8e1e8af997e678c7b81"}, + {file = "frozenlist-1.3.3-cp311-cp311-win32.whl", hash = "sha256:f30f1928162e189091cf4d9da2eac617bfe78ef907a761614ff577ef4edfb3c8"}, + {file = "frozenlist-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:a6394d7dadd3cfe3f4b3b186e54d5d8504d44f2d58dcc89d693698e8b7132b32"}, + {file = "frozenlist-1.3.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8df3de3a9ab8325f94f646609a66cbeeede263910c5c0de0101079ad541af332"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0693c609e9742c66ba4870bcee1ad5ff35462d5ffec18710b4ac89337ff16e27"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd4210baef299717db0a600d7a3cac81d46ef0e007f88c9335db79f8979c0d3d"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:394c9c242113bfb4b9aa36e2b80a05ffa163a30691c7b5a29eba82e937895d5e"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6327eb8e419f7d9c38f333cde41b9ae348bec26d840927332f17e887a8dcb70d"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e24900aa13212e75e5b366cb9065e78bbf3893d4baab6052d1aca10d46d944c"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3843f84a6c465a36559161e6c59dce2f2ac10943040c2fd021cfb70d58c4ad56"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:84610c1502b2461255b4c9b7d5e9c48052601a8957cd0aea6ec7a7a1e1fb9420"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = 
"sha256:c21b9aa40e08e4f63a2f92ff3748e6b6c84d717d033c7b3438dd3123ee18f70e"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:efce6ae830831ab6a22b9b4091d411698145cb9b8fc869e1397ccf4b4b6455cb"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:40de71985e9042ca00b7953c4f41eabc3dc514a2d1ff534027f091bc74416401"}, + {file = "frozenlist-1.3.3-cp37-cp37m-win32.whl", hash = "sha256:180c00c66bde6146a860cbb81b54ee0df350d2daf13ca85b275123bbf85de18a"}, + {file = "frozenlist-1.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9bbbcedd75acdfecf2159663b87f1bb5cfc80e7cd99f7ddd9d66eb98b14a8411"}, + {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:034a5c08d36649591be1cbb10e09da9f531034acfe29275fc5454a3b101ce41a"}, + {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ba64dc2b3b7b158c6660d49cdb1d872d1d0bf4e42043ad8d5006099479a194e5"}, + {file = "frozenlist-1.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:47df36a9fe24054b950bbc2db630d508cca3aa27ed0566c0baf661225e52c18e"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:008a054b75d77c995ea26629ab3a0c0d7281341f2fa7e1e85fa6153ae29ae99c"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:841ea19b43d438a80b4de62ac6ab21cfe6827bb8a9dc62b896acc88eaf9cecba"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e235688f42b36be2b6b06fc37ac2126a73b75fb8d6bc66dd632aa35286238703"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca713d4af15bae6e5d79b15c10c8522859a9a89d3b361a50b817c98c2fb402a2"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:9ac5995f2b408017b0be26d4a1d7c61bce106ff3d9e3324374d66b5964325448"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4ae8135b11652b08a8baf07631d3ebfe65a4c87909dbef5fa0cdde440444ee4"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4ea42116ceb6bb16dbb7d526e242cb6747b08b7710d9782aa3d6732bd8d27649"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:810860bb4bdce7557bc0febb84bbd88198b9dbc2022d8eebe5b3590b2ad6c842"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:ee78feb9d293c323b59a6f2dd441b63339a30edf35abcb51187d2fc26e696d13"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0af2e7c87d35b38732e810befb9d797a99279cbb85374d42ea61c1e9d23094b3"}, + {file = "frozenlist-1.3.3-cp38-cp38-win32.whl", hash = "sha256:899c5e1928eec13fd6f6d8dc51be23f0d09c5281e40d9cf4273d188d9feeaf9b"}, + {file = "frozenlist-1.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:7f44e24fa70f6fbc74aeec3e971f60a14dde85da364aa87f15d1be94ae75aeef"}, + {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2b07ae0c1edaa0a36339ec6cce700f51b14a3fc6545fdd32930d2c83917332cf"}, + {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ebb86518203e12e96af765ee89034a1dbb0c3c65052d1b0c19bbbd6af8a145e1"}, + {file = "frozenlist-1.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5cf820485f1b4c91e0417ea0afd41ce5cf5965011b3c22c400f6d144296ccbc0"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c11e43016b9024240212d2a65043b70ed8dfd3b52678a1271972702d990ac6d"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8fa3c6e3305aa1146b59a09b32b2e04074945ffcfb2f0931836d103a2c38f936"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:352bd4c8c72d508778cf05ab491f6ef36149f4d0cb3c56b1b4302852255d05d5"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65a5e4d3aa679610ac6e3569e865425b23b372277f89b5ef06cf2cdaf1ebf22b"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e2c1185858d7e10ff045c496bbf90ae752c28b365fef2c09cf0fa309291669"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f163d2fd041c630fed01bc48d28c3ed4a3b003c00acd396900e11ee5316b56bb"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:05cdb16d09a0832eedf770cb7bd1fe57d8cf4eaf5aced29c4e41e3f20b30a784"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:8bae29d60768bfa8fb92244b74502b18fae55a80eac13c88eb0b496d4268fd2d"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:eedab4c310c0299961ac285591acd53dc6723a1ebd90a57207c71f6e0c2153ab"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3bbdf44855ed8f0fbcd102ef05ec3012d6a4fd7c7562403f76ce6a52aeffb2b1"}, + {file = "frozenlist-1.3.3-cp39-cp39-win32.whl", hash = "sha256:efa568b885bca461f7c7b9e032655c0c143d305bf01c30caf6db2854a4532b38"}, + {file = "frozenlist-1.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:cfe33efc9cb900a4c46f91a5ceba26d6df370ffddd9ca386eb1d4f0ad97b9ea9"}, + {file = "frozenlist-1.3.3.tar.gz", hash = "sha256:58bcc55721e8a90b88332d6cd441261ebb22342e238296bb330968952fbb3a6a"}, +] [[package]] name = "graphviz" -version = "0.20" +version = "0.20.1" description = "Simple Python interface for Graphviz" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "graphviz-0.20.1-py3-none-any.whl", hash = "sha256:587c58a223b51611c0cf461132da386edd896a029524ca61a1462b880bf97977"}, + {file = "graphviz-0.20.1.zip", hash = 
"sha256:8c58f14adaa3b947daf26c19bc1e98c4e0702cdc31cf99153e6f06904d492bf8"}, +] [package.extras] dev = ["flake8", "pep8-naming", "tox (>=3)", "twine", "wheel"] -docs = ["sphinx (>=4)", "sphinx-autodoc-typehints", "sphinx-rtd-theme"] +docs = ["sphinx (>=5)", "sphinx-autodoc-typehints", "sphinx-rtd-theme"] test = ["coverage", "mock (>=4)", "pytest (>=7)", "pytest-cov", "pytest-mock (>=3)"] [[package]] name = "hathorlib" -version = "0.2.0" +version = "0.3.0" description = "Hathor Network base objects library" category = "main" optional = false python-versions = ">=3.6,<4.0" +files = [ + {file = "hathorlib-0.3.0-py3-none-any.whl", hash = "sha256:079c2adbe0a28052e1db224324ca0cb8edbe6c3ced6ee5de09bb289cbed4c4e2"}, + {file = "hathorlib-0.3.0.tar.gz", hash = "sha256:0d268666504c9bd92369de889ebc292c077ab37b12b18b3383dfddb3d8b14741"}, +] [package.dependencies] base58 = ">=2.1.0" -cryptography = ">=3.3.1" +cryptography = ">=38.0.3" +pycoin = ">=0.92.20220529,<0.93.0" [package.extras] client = ["aiohttp (>=3.7.0)", "structlog (>=20.0.0)"] @@ -321,27 +730,40 @@ description = "A featureful, immutable, and correct URL for Python." 
category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "hyperlink-21.0.0-py2.py3-none-any.whl", hash = "sha256:e6b14c37ecb73e89c77d78cdb4c2cc8f3fb59a885c5b3f819ff4ed80f25af1b4"}, + {file = "hyperlink-21.0.0.tar.gz", hash = "sha256:427af957daa58bc909471c6c40f74c5450fa123dd093fc53efd2e91d2705a56b"}, +] [package.dependencies] idna = ">=2.5" [[package]] name = "idna" -version = "3.3" +version = "3.4" description = "Internationalized Domain Names in Applications (IDNA)" category = "main" optional = false python-versions = ">=3.5" +files = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] [[package]] name = "incremental" -version = "21.3.0" -description = "A small library that versions your Python projects." +version = "22.10.0" +description = "\"A small library that versions your Python projects.\"" category = "main" optional = false python-versions = "*" +files = [ + {file = "incremental-22.10.0-py2.py3-none-any.whl", hash = "sha256:b864a1f30885ee72c5ac2835a761b8fe8aa9c28b9395cacf27286602688d3e51"}, + {file = "incremental-22.10.0.tar.gz", hash = "sha256:912feeb5e0f7e0188e6f42241d2f450002e11bbc0937c65865045854c24c0bd0"}, +] [package.extras] +mypy = ["click (>=6.0)", "mypy (==0.812)", "twisted (>=16.4.0)"] scripts = ["click (>=6.0)", "twisted (>=16.4.0)"] [[package]] @@ -351,6 +773,10 @@ description = "iniconfig: brain-dead simple config-ini parsing" category = "dev" optional = false python-versions = "*" +files = [ + {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, + {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, +] [[package]] name = "intervaltree" @@ -359,17 +785,24 @@ 
description = "Editable interval tree data structure for Python 2 and 3" category = "main" optional = false python-versions = "*" +files = [ + {file = "intervaltree-3.1.0.tar.gz", hash = "sha256:902b1b88936918f9b2a19e0e5eb7ccb430ae45cde4f39ea4b36932920d33952d"}, +] [package.dependencies] sortedcontainers = ">=2.0,<3.0" [[package]] name = "ipython" -version = "8.4.0" +version = "8.7.0" description = "IPython: Productive Interactive Computing" category = "main" optional = false python-versions = ">=3.8" +files = [ + {file = "ipython-8.7.0-py3-none-any.whl", hash = "sha256:352042ddcb019f7c04e48171b4dd78e4c4bb67bf97030d170e154aac42b656d9"}, + {file = "ipython-8.7.0.tar.gz", hash = "sha256:882899fe78d5417a0aa07f995db298fa28b58faeba2112d2e3a4c95fe14bb738"}, +] [package.dependencies] appnope = {version = "*", markers = "sys_platform == \"darwin\""} @@ -380,16 +813,15 @@ jedi = ">=0.16" matplotlib-inline = "*" pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} pickleshare = "*" -prompt-toolkit = ">=2.0.0,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.1.0" +prompt-toolkit = ">=3.0.11,<3.1.0" pygments = ">=2.4.0" -setuptools = ">=18.5" stack-data = "*" traitlets = ">=5" [package.extras] -all = ["Sphinx (>=1.3)", "black", "curio", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.19)", "pandas", "pytest (<7.1)", "pytest-asyncio", "qtconsole", "testpath", "trio"] +all = ["black", "curio", "docrepr", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.20)", "pandas", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] black = ["black"] -doc = ["Sphinx (>=1.3)"] +doc = ["docrepr", "ipykernel", "matplotlib", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "setuptools (>=18.5)", "sphinx (>=1.3)", 
"sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] kernel = ["ipykernel"] nbconvert = ["nbconvert"] nbformat = ["nbformat"] @@ -397,7 +829,7 @@ notebook = ["ipywidgets", "notebook"] parallel = ["ipyparallel"] qtconsole = ["qtconsole"] test = ["pytest (<7.1)", "pytest-asyncio", "testpath"] -test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.19)", "pandas", "pytest (<7.1)", "pytest-asyncio", "testpath", "trio"] +test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.20)", "pandas", "pytest (<7.1)", "pytest-asyncio", "testpath", "trio"] [[package]] name = "isort" @@ -406,6 +838,10 @@ description = "A Python utility / library to sort Python imports." category = "dev" optional = false python-versions = ">=3.6.1,<4.0" +files = [ + {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, + {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, +] [package.dependencies] colorama = {version = ">=0.4.3,<0.5.0", optional = true, markers = "extra == \"colors\""} @@ -418,37 +854,50 @@ requirements-deprecated-finder = ["pip-api", "pipreqs"] [[package]] name = "jedi" -version = "0.18.1" +version = "0.18.2" description = "An autocompletion tool for Python that can be used for text editors." 
category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "jedi-0.18.2-py2.py3-none-any.whl", hash = "sha256:203c1fd9d969ab8f2119ec0a3342e0b49910045abe6af0a3ae83a5764d54639e"}, + {file = "jedi-0.18.2.tar.gz", hash = "sha256:bae794c30d07f6d910d32a7048af09b5a39ed740918da923c6b780790ebac612"}, +] [package.dependencies] parso = ">=0.8.0,<0.9.0" [package.extras] +docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] -testing = ["Django (<3.1)", "colorama", "docopt", "pytest (<7.0.0)"] +testing = ["Django (<3.1)", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] [[package]] name = "matplotlib-inline" -version = "0.1.3" +version = "0.1.6" description = "Inline Matplotlib backend for Jupyter" category = "main" optional = false python-versions = ">=3.5" +files = [ + {file = "matplotlib-inline-0.1.6.tar.gz", hash = "sha256:f887e5f10ba98e8d2b150ddcf4702c1e5f8b3a20005eb0f74bfdbd360ee6f304"}, + {file = "matplotlib_inline-0.1.6-py3-none-any.whl", hash = "sha256:f1f41aab5328aa5aaea9b16d083b128102f8712542f819fe7e6a420ff581b311"}, +] [package.dependencies] traitlets = "*" [[package]] name = "mccabe" -version = "0.6.1" +version = "0.7.0" description = "McCabe checker, plugin for flake8" category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = 
"sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] [[package]] name = "mnemonic" @@ -457,22 +906,130 @@ description = "Implementation of Bitcoin BIP-0039" category = "main" optional = false python-versions = ">=3.5" +files = [ + {file = "mnemonic-0.20-py3-none-any.whl", hash = "sha256:acd2168872d0379e7a10873bb3e12bf6c91b35de758135c4fbd1015ef18fafc5"}, + {file = "mnemonic-0.20.tar.gz", hash = "sha256:7c6fb5639d779388027a77944680aee4870f0fcd09b1e42a5525ee2ce4c625f6"}, +] [[package]] name = "multidict" -version = "6.0.2" +version = "6.0.3" description = "multidict implementation" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "multidict-6.0.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:73009ea04205966d47e16d98686ac5c438af23a1bb30b48a2c5da3423ec9ce37"}, + {file = "multidict-6.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8b92a9f3ab904397a33b193000dc4de7318ea175c4c460a1e154c415f9008e3d"}, + {file = "multidict-6.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:578bfcb16f4b8675ef71b960c00f174b0426e0eeb796bab6737389d8288eb827"}, + {file = "multidict-6.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1650ea41c408755da5eed52ac6ccbc8938ccc3e698d81e6f6a1be02ff2a0945"}, + {file = "multidict-6.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d52442e7c951e4c9ee591d6047706e66923d248d83958bbf99b8b19515fffaef"}, + {file = "multidict-6.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ad7d66422b9cc51125509229693d27e18c08f2dea3ac9de408d821932b1b3759"}, + {file = "multidict-6.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6cd14e61f0da2a2cfb9fe05bfced2a1ed7063ce46a7a8cd473be4973de9a7f91"}, + {file = 
"multidict-6.0.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:190626ced82d4cc567a09e7346340d380154a493bac6905e0095d8158cdf1e38"}, + {file = "multidict-6.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:791458a1f7d1b4ab3bd9e93e0dcd1d59ef7ee9aa051dcd1ea030e62e49b923fd"}, + {file = "multidict-6.0.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b46e79a9f4db53897d17bc64a39d1c7c2be3e3d4f8dba6d6730a2b13ddf0f986"}, + {file = "multidict-6.0.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e4a095e18847c12ec20e55326ab8782d9c2d599400a3a2f174fab4796875d0e2"}, + {file = "multidict-6.0.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fb6c3dc3d65014d2c782f5acf0b3ba14e639c6c33d3ed8932ead76b9080b3544"}, + {file = "multidict-6.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3541882266247c7cd3dba78d6ef28dbe704774df60c9e4231edaa4493522e614"}, + {file = "multidict-6.0.3-cp310-cp310-win32.whl", hash = "sha256:67090b17a0a5be5704fd109f231ee73cefb1b3802d41288d6378b5df46ae89ba"}, + {file = "multidict-6.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:36df958b15639e40472adaa4f0c2c7828fe680f894a6b48c4ce229f59a6a798b"}, + {file = "multidict-6.0.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5b51969503709415a35754954c2763f536a70b8bf7360322b2edb0c0a44391f6"}, + {file = "multidict-6.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:24e8d513bfcaadc1f8b0ebece3ff50961951c54b07d5a775008a882966102418"}, + {file = "multidict-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d325d61cac602976a5d47b19eaa7d04e3daf4efce2164c630219885087234102"}, + {file = "multidict-6.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbbe17f8a7211b623502d2bf41022a51da3025142401417c765bf9a56fed4c"}, + {file = "multidict-6.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4fb3fe591956d8841882c463f934c9f7485cfd5f763a08c0d467b513dc18ef89"}, + 
{file = "multidict-6.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1925f78a543b94c3d46274c66a366fee8a263747060220ed0188e5f3eeea1c0"}, + {file = "multidict-6.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21e1ce0b187c4e93112304dcde2aa18922fdbe8fb4f13d8aa72a5657bce0563a"}, + {file = "multidict-6.0.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e07c24018986fb00d6e7eafca8fcd6e05095649e17fcf0e33a592caaa62a78b9"}, + {file = "multidict-6.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:114a4ab3e5cfbc56c4b6697686ecb92376c7e8c56893ef20547921552f8bdf57"}, + {file = "multidict-6.0.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4ccf55f28066b4f08666764a957c2b7c241c7547b0921d69c7ceab5f74fe1a45"}, + {file = "multidict-6.0.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:9d359b0a962e052b713647ac1f13eabf2263167b149ed1e27d5c579f5c8c7d2c"}, + {file = "multidict-6.0.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:df7b4cee3ff31b3335aba602f8d70dbc641e5b7164b1e9565570c9d3c536a438"}, + {file = "multidict-6.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ee9b1cae9a6c5d023e5a150f6f6b9dbb3c3bbc7887d6ee07d4c0ecb49a473734"}, + {file = "multidict-6.0.3-cp311-cp311-win32.whl", hash = "sha256:960ce1b790952916e682093788696ef7e33ac6a97482f9b983abdc293091b531"}, + {file = "multidict-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:2b66d61966b12e6bba500e5cbb2c721a35e119c30ee02495c5629bd0e91eea30"}, + {file = "multidict-6.0.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:526f8397fc124674b8f39748680a0ff673bd6a715fecb4866716d36e380f015f"}, + {file = "multidict-6.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f5d5129a937af4e3c4a1d6c139f4051b7d17d43276cefdd8d442a7031f7eef2"}, + {file = "multidict-6.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:38d394814b39be1c36ac709006d39d50d72a884f9551acd9c8cc1ffae3fc8c4e"}, + {file = "multidict-6.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99341ca1f1db9e7f47914cb2461305665a662383765ced6f843712564766956d"}, + {file = "multidict-6.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5790cc603456b6dcf8a9a4765f666895a6afddc88b3d3ba7b53dea2b6e23116"}, + {file = "multidict-6.0.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce8e51774eb03844588d3c279adb94efcd0edeccd2f97516623292445bcc01f9"}, + {file = "multidict-6.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:baa96a3418e27d723064854143b2f414a422c84cc87285a71558722049bebc5a"}, + {file = "multidict-6.0.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:cb4a08f0aaaa869f189ffea0e17b86ad0237b51116d494da15ef7991ee6ad2d7"}, + {file = "multidict-6.0.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:62db44727d0befea68e8ad2881bb87a9cfb6b87d45dd78609009627167f37b69"}, + {file = "multidict-6.0.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:4cc5c8cd205a9810d16a5cd428cd81bac554ad1477cb87f4ad722b10992e794d"}, + {file = "multidict-6.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:f76109387e1ec8d8e2137c94c437b89fe002f29e0881aae8ae45529bdff92000"}, + {file = "multidict-6.0.3-cp37-cp37m-win32.whl", hash = "sha256:f8a728511c977df6f3d8af388fcb157e49f11db4a6637dd60131b8b6e40b0253"}, + {file = "multidict-6.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:c2a1168e5aa7c72499fb03c850e0f03f624fa4a5c8d2e215c518d0a73872eb64"}, + {file = "multidict-6.0.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:eddf604a3de2ace3d9a4e4d491be7562a1ac095a0a1c95a9ec5781ef0273ef11"}, + {file = "multidict-6.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d09daf5c6ce7fc6ed444c9339bbde5ea84e2534d1ca1cd37b60f365c77f00dea"}, + {file = "multidict-6.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:12e0d396faa6dc55ff5379eee54d1df3b508243ff15bfc8295a6ec7a4483a335"}, + {file = "multidict-6.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70740c2bc9ab1c99f7cdcb104f27d16c63860c56d51c5bf0ef82fc1d892a2131"}, + {file = "multidict-6.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e322c94596054352f5a02771eec71563c018b15699b961aba14d6dd943367022"}, + {file = "multidict-6.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4159fc1ec9ede8ab93382e0d6ba9b1b3d23c72da39a834db7a116986605c7ab4"}, + {file = "multidict-6.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47defc0218682281a52fb1f6346ebb8b68b17538163a89ea24dfe4da37a8a9a3"}, + {file = "multidict-6.0.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f9511e48bde6b995825e8d35e434fc96296cf07a25f4aae24ff9162be7eaa46"}, + {file = "multidict-6.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e0bce9f7c30e7e3a9e683f670314c0144e8d34be6b7019e40604763bd278d84f"}, + {file = "multidict-6.0.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:01b456046a05ff7cceefb0e1d2a9d32f05efcb1c7e0d152446304e11557639ce"}, + {file = "multidict-6.0.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:8230a39bae6c2e8a09e4da6bace5064693b00590a4a213e38f9a9366da10e7dd"}, + {file = "multidict-6.0.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:445c0851a1cbc1f2ec3b40bc22f9c4a235edb3c9a0906122a9df6ea8d51f886c"}, + {file = "multidict-6.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9aac6881454a750554ed4b280a839dcf9e2133a9d12ab4d417d673fb102289b7"}, + {file = "multidict-6.0.3-cp38-cp38-win32.whl", hash = "sha256:81c3d597591b0940e04949e4e4f79359b2d2e542a686ba0da5e25de33fec13e0"}, + {file = "multidict-6.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:dc4cfef5d899f5f1a15f3d2ac49f71107a01a5a2745b4dd53fa0cede1419385a"}, + {file = 
"multidict-6.0.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d408172519049e36fb6d29672f060dc8461fc7174eba9883c7026041ef9bfb38"}, + {file = "multidict-6.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e068dfeadbce63072b2d8096486713d04db4946aad0a0f849bd4fc300799d0d3"}, + {file = "multidict-6.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8b817d4ed68fd568ec5e45dd75ddf30cc72a47a6b41b74d5bb211374c296f5e"}, + {file = "multidict-6.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cf5d19e12eff855aa198259c0b02fd3f5d07e1291fbd20279c37b3b0e6c9852"}, + {file = "multidict-6.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e5a811aab1b4aea0b4be669363c19847a8c547510f0e18fb632956369fdbdf67"}, + {file = "multidict-6.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2cfda34b7cb99eacada2072e0f69c0ad3285cb6f8e480b11f2b6d6c1c6f92718"}, + {file = "multidict-6.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:beeca903e4270b4afcd114f371a9602240dc143f9e944edfea00f8d4ad56c40d"}, + {file = "multidict-6.0.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd5771e8ea325f85cbb361ddbdeb9ae424a68e5dfb6eea786afdcd22e68a7d5d"}, + {file = "multidict-6.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9dbab2a7e9c073bc9538824a01f5ed689194db7f55f2b8102766873e906a6c1a"}, + {file = "multidict-6.0.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f2c0957b3e8c66c10d27272709a5299ab3670a0f187c9428f3b90d267119aedb"}, + {file = "multidict-6.0.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:94cbe5535ef150546b8321aebea22862a3284da51e7b55f6f95b7d73e96d90ee"}, + {file = "multidict-6.0.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:d0e798b072cf2aab9daceb43d97c9c527a0c7593e67a7846ad4cc6051de1e303"}, + {file = "multidict-6.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:a27b029caa3b555a4f3da54bc1e718eb55fcf1a11fda8bf0132147b476cf4c08"}, + {file = "multidict-6.0.3-cp39-cp39-win32.whl", hash = "sha256:018c8e3be7f161a12b3e41741b6721f9baeb2210f4ab25a6359b7d76c1017dce"}, + {file = "multidict-6.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:5e58ec0375803526d395f6f7e730ecc45d06e15f68f7b9cdbf644a2918324e51"}, + {file = "multidict-6.0.3.tar.gz", hash = "sha256:2523a29006c034687eccd3ee70093a697129a3ffe8732535d3b2df6a4ecc279d"}, +] [[package]] name = "mypy" -version = "0.950" +version = "1.0.0" description = "Optional static typing for Python" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" +files = [ + {file = "mypy-1.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0626db16705ab9f7fa6c249c017c887baf20738ce7f9129da162bb3075fc1af"}, + {file = "mypy-1.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1ace23f6bb4aec4604b86c4843276e8fa548d667dbbd0cb83a3ae14b18b2db6c"}, + {file = "mypy-1.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87edfaf344c9401942883fad030909116aa77b0fa7e6e8e1c5407e14549afe9a"}, + {file = "mypy-1.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0ab090d9240d6b4e99e1fa998c2d0aa5b29fc0fb06bd30e7ad6183c95fa07593"}, + {file = "mypy-1.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:7cc2c01dfc5a3cbddfa6c13f530ef3b95292f926329929001d45e124342cd6b7"}, + {file = "mypy-1.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14d776869a3e6c89c17eb943100f7868f677703c8a4e00b3803918f86aafbc52"}, + {file = "mypy-1.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bb2782a036d9eb6b5a6efcdda0986774bf798beef86a62da86cb73e2a10b423d"}, + {file = "mypy-1.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cfca124f0ac6707747544c127880893ad72a656e136adc935c8600740b21ff5"}, + {file = "mypy-1.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8845125d0b7c57838a10fd8925b0f5f709d0e08568ce587cc862aacce453e3dd"}, + 
{file = "mypy-1.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b1b9e1ed40544ef486fa8ac022232ccc57109f379611633ede8e71630d07d2"}, + {file = "mypy-1.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c7cf862aef988b5fbaa17764ad1d21b4831436701c7d2b653156a9497d92c83c"}, + {file = "mypy-1.0.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cd187d92b6939617f1168a4fe68f68add749902c010e66fe574c165c742ed88"}, + {file = "mypy-1.0.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4e5175026618c178dfba6188228b845b64131034ab3ba52acaffa8f6c361f805"}, + {file = "mypy-1.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2f6ac8c87e046dc18c7d1d7f6653a66787a4555085b056fe2d599f1f1a2a2d21"}, + {file = "mypy-1.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7306edca1c6f1b5fa0bc9aa645e6ac8393014fa82d0fa180d0ebc990ebe15964"}, + {file = "mypy-1.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3cfad08f16a9c6611e6143485a93de0e1e13f48cfb90bcad7d5fde1c0cec3d36"}, + {file = "mypy-1.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67cced7f15654710386e5c10b96608f1ee3d5c94ca1da5a2aad5889793a824c1"}, + {file = "mypy-1.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a86b794e8a56ada65c573183756eac8ac5b8d3d59daf9d5ebd72ecdbb7867a43"}, + {file = "mypy-1.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:50979d5efff8d4135d9db293c6cb2c42260e70fb010cbc697b1311a4d7a39ddb"}, + {file = "mypy-1.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3ae4c7a99e5153496243146a3baf33b9beff714464ca386b5f62daad601d87af"}, + {file = "mypy-1.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e398652d005a198a7f3c132426b33c6b85d98aa7dc852137a2a3be8890c4072"}, + {file = "mypy-1.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be78077064d016bc1b639c2cbcc5be945b47b4261a4f4b7d8923f6c69c5c9457"}, + {file = "mypy-1.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:92024447a339400ea00ac228369cd242e988dd775640755fa4ac0c126e49bb74"}, + {file = "mypy-1.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:fe523fcbd52c05040c7bee370d66fee8373c5972171e4fbc323153433198592d"}, + {file = "mypy-1.0.0-py3-none-any.whl", hash = "sha256:2efa963bdddb27cb4a0d42545cd137a8d2b883bd181bbc4525b568ef6eca258f"}, + {file = "mypy-1.0.0.tar.gz", hash = "sha256:f34495079c8d9da05b183f9f7daec2878280c2ad7cc81da686ef0b484cea2ecf"}, +] [package.dependencies] mypy-extensions = ">=0.4.3" @@ -481,6 +1038,7 @@ typing-extensions = ">=3.10" [package.extras] dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] python2 = ["typed-ast (>=1.4.0,<2)"] reports = ["lxml"] @@ -491,17 +1049,25 @@ description = "Experimental type system extensions for programs checked with the category = "dev" optional = false python-versions = "*" +files = [ + {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, + {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, +] [[package]] name = "mypy-zope" -version = "0.3.7" +version = "0.9.0" description = "Plugin for mypy to support zope interfaces" category = "dev" optional = false python-versions = "*" +files = [ + {file = "mypy-zope-0.9.0.tar.gz", hash = "sha256:88bf6cd056e38b338e6956055958a7805b4ff84404ccd99e29883a3647a1aeb3"}, + {file = "mypy_zope-0.9.0-py3-none-any.whl", hash = "sha256:e1bb4b57084f76ff8a154a3e07880a1af2ac6536c491dad4b143d529f72c5d15"}, +] [package.dependencies] -mypy = "0.950" +mypy = "1.0.0" "zope.interface" = "*" "zope.schema" = "*" @@ -510,14 +1076,15 @@ test = ["lxml", "pytest (>=4.6)", "pytest-cov"] [[package]] name = "packaging" -version = "21.3" +version = "22.0" description = "Core utilities for Python packages" category = "dev" optional = false -python-versions = ">=3.6" - -[package.dependencies] -pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" +python-versions = ">=3.7" 
+files = [ + {file = "packaging-22.0-py3-none-any.whl", hash = "sha256:957e2148ba0e1a3b282772e791ef1d8083648bc131c8ab0c1feba110ce1146c3"}, + {file = "packaging-22.0.tar.gz", hash = "sha256:2198ec20bd4c017b8f9717e00f0c8714076fc2fd93816750ab48e2c41de2cfd3"}, +] [[package]] name = "parso" @@ -526,6 +1093,10 @@ description = "A Python Parser" category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, + {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, +] [package.extras] qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] @@ -538,6 +1109,10 @@ description = "Pexpect allows easy control of interactive console applications." category = "main" optional = false python-versions = "*" +files = [ + {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, + {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"}, +] [package.dependencies] ptyprocess = ">=0.5" @@ -549,6 +1124,10 @@ description = "Tiny 'shelve'-like database with concurrency support" category = "main" optional = false python-versions = "*" +files = [ + {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, + {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, +] [[package]] name = "pluggy" @@ -557,6 +1136,10 @@ description = "plugin and hook calling mechanisms for python" category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, + {file = "pluggy-1.0.0.tar.gz", hash = 
"sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, +] [package.extras] dev = ["pre-commit", "tox"] @@ -564,22 +1147,30 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "prometheus-client" -version = "0.14.1" +version = "0.15.0" description = "Python client for the Prometheus monitoring system." category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "prometheus_client-0.15.0-py3-none-any.whl", hash = "sha256:db7c05cbd13a0f79975592d112320f2605a325969b270a94b71dcabc47b931d2"}, + {file = "prometheus_client-0.15.0.tar.gz", hash = "sha256:be26aa452490cfcf6da953f9436e95a9f2b4d578ca80094b4458930e5f584ab1"}, +] [package.extras] twisted = ["twisted"] [[package]] name = "prompt-toolkit" -version = "3.0.29" +version = "3.0.36" description = "Library for building powerful interactive command lines in Python" category = "main" optional = false python-versions = ">=3.6.2" +files = [ + {file = "prompt_toolkit-3.0.36-py3-none-any.whl", hash = "sha256:aa64ad242a462c5ff0363a7b9cfe696c20d55d9fc60c11fd8e632d064804d305"}, + {file = "prompt_toolkit-3.0.36.tar.gz", hash = "sha256:3e163f254bef5a03b146397d7c1963bd3e2812f0964bb9a24e6ec761fd28db63"}, +] [package.dependencies] wcwidth = "*" @@ -591,6 +1182,10 @@ description = "Run a subprocess in a pseudo terminal" category = "main" optional = false python-versions = "*" +files = [ + {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, + {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, +] [[package]] name = "pure-eval" @@ -599,18 +1194,14 @@ description = "Safely evaluate AST nodes without side effects" category = "main" optional = false python-versions = "*" +files = [ + {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, + {file = "pure_eval-0.2.2.tar.gz", 
hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, +] [package.extras] tests = ["pytest"] -[[package]] -name = "py" -version = "1.11.0" -description = "library with cross-python path, ini-parsing, io, code, log facilities" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - [[package]] name = "pyasn1" version = "0.4.8" @@ -618,6 +1209,10 @@ description = "ASN.1 types and codecs" category = "main" optional = false python-versions = "*" +files = [ + {file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"}, + {file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"}, +] [[package]] name = "pyasn1-modules" @@ -626,17 +1221,25 @@ description = "A collection of ASN.1-based protocols modules." category = "main" optional = false python-versions = "*" +files = [ + {file = "pyasn1-modules-0.2.8.tar.gz", hash = "sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e"}, + {file = "pyasn1_modules-0.2.8-py2.py3-none-any.whl", hash = "sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74"}, +] [package.dependencies] pyasn1 = ">=0.4.6,<0.5.0" [[package]] name = "pycodestyle" -version = "2.8.0" +version = "2.10.0" description = "Python style guide checker" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.6" +files = [ + {file = "pycodestyle-2.10.0-py2.py3-none-any.whl", hash = "sha256:8a4eaf0d0495c7395bdab3589ac2db602797d76207242c17d470186815706610"}, + {file = "pycodestyle-2.10.0.tar.gz", hash = "sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053"}, +] [[package]] name = "pycoin" @@ -645,6 +1248,9 @@ description = "Utilities for Bitcoin and altcoin addresses and transaction manip category = "main" optional = false python-versions = "*" 
+files = [ + {file = "pycoin-0.92.20220529.tar.gz", hash = "sha256:3d0396475b5e2d9da7a5057eab72be0e088505e8e44680788106236872c542a2"}, +] [[package]] name = "pycparser" @@ -653,77 +1259,145 @@ description = "C parser in Python" category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] + +[[package]] +name = "pydantic" +version = "1.10.6" +description = "Data validation and settings management using python type hints" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f9289065611c48147c1dd1fd344e9d57ab45f1d99b0fb26c51f1cf72cd9bcd31"}, + {file = "pydantic-1.10.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8c32b6bba301490d9bb2bf5f631907803135e8085b6aa3e5fe5a770d46dd0160"}, + {file = "pydantic-1.10.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd9b9e98068fa1068edfc9eabde70a7132017bdd4f362f8b4fd0abed79c33083"}, + {file = "pydantic-1.10.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c84583b9df62522829cbc46e2b22e0ec11445625b5acd70c5681ce09c9b11c4"}, + {file = "pydantic-1.10.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b41822064585fea56d0116aa431fbd5137ce69dfe837b599e310034171996084"}, + {file = "pydantic-1.10.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:61f1f08adfaa9cc02e0cbc94f478140385cbd52d5b3c5a657c2fceb15de8d1fb"}, + {file = "pydantic-1.10.6-cp310-cp310-win_amd64.whl", hash = "sha256:32937835e525d92c98a1512218db4eed9ddc8f4ee2a78382d77f54341972c0e7"}, + {file = "pydantic-1.10.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:bbd5c531b22928e63d0cb1868dee76123456e1de2f1cb45879e9e7a3f3f1779b"}, + {file = "pydantic-1.10.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e277bd18339177daa62a294256869bbe84df1fb592be2716ec62627bb8d7c81d"}, + {file = "pydantic-1.10.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f15277d720aa57e173954d237628a8d304896364b9de745dcb722f584812c7"}, + {file = "pydantic-1.10.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b243b564cea2576725e77aeeda54e3e0229a168bc587d536cd69941e6797543d"}, + {file = "pydantic-1.10.6-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3ce13a558b484c9ae48a6a7c184b1ba0e5588c5525482681db418268e5f86186"}, + {file = "pydantic-1.10.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3ac1cd4deed871dfe0c5f63721e29debf03e2deefa41b3ed5eb5f5df287c7b70"}, + {file = "pydantic-1.10.6-cp311-cp311-win_amd64.whl", hash = "sha256:b1eb6610330a1dfba9ce142ada792f26bbef1255b75f538196a39e9e90388bf4"}, + {file = "pydantic-1.10.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4ca83739c1263a044ec8b79df4eefc34bbac87191f0a513d00dd47d46e307a65"}, + {file = "pydantic-1.10.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea4e2a7cb409951988e79a469f609bba998a576e6d7b9791ae5d1e0619e1c0f2"}, + {file = "pydantic-1.10.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53de12b4608290992a943801d7756f18a37b7aee284b9ffa794ee8ea8153f8e2"}, + {file = "pydantic-1.10.6-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:60184e80aac3b56933c71c48d6181e630b0fbc61ae455a63322a66a23c14731a"}, + {file = "pydantic-1.10.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:415a3f719ce518e95a92effc7ee30118a25c3d032455d13e121e3840985f2efd"}, + {file = "pydantic-1.10.6-cp37-cp37m-win_amd64.whl", hash = "sha256:72cb30894a34d3a7ab6d959b45a70abac8a2a93b6480fc5a7bfbd9c935bdc4fb"}, + {file = 
"pydantic-1.10.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3091d2eaeda25391405e36c2fc2ed102b48bac4b384d42b2267310abae350ca6"}, + {file = "pydantic-1.10.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:751f008cd2afe812a781fd6aa2fb66c620ca2e1a13b6a2152b1ad51553cb4b77"}, + {file = "pydantic-1.10.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12e837fd320dd30bd625be1b101e3b62edc096a49835392dcf418f1a5ac2b832"}, + {file = "pydantic-1.10.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:587d92831d0115874d766b1f5fddcdde0c5b6c60f8c6111a394078ec227fca6d"}, + {file = "pydantic-1.10.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:476f6674303ae7965730a382a8e8d7fae18b8004b7b69a56c3d8fa93968aa21c"}, + {file = "pydantic-1.10.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3a2be0a0f32c83265fd71a45027201e1278beaa82ea88ea5b345eea6afa9ac7f"}, + {file = "pydantic-1.10.6-cp38-cp38-win_amd64.whl", hash = "sha256:0abd9c60eee6201b853b6c4be104edfba4f8f6c5f3623f8e1dba90634d63eb35"}, + {file = "pydantic-1.10.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6195ca908045054dd2d57eb9c39a5fe86409968b8040de8c2240186da0769da7"}, + {file = "pydantic-1.10.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:43cdeca8d30de9a897440e3fb8866f827c4c31f6c73838e3a01a14b03b067b1d"}, + {file = "pydantic-1.10.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c19eb5163167489cb1e0161ae9220dadd4fc609a42649e7e84a8fa8fff7a80f"}, + {file = "pydantic-1.10.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:012c99a9c0d18cfde7469aa1ebff922e24b0c706d03ead96940f5465f2c9cf62"}, + {file = "pydantic-1.10.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:528dcf7ec49fb5a84bf6fe346c1cc3c55b0e7603c2123881996ca3ad79db5bfc"}, + {file = "pydantic-1.10.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:163e79386c3547c49366e959d01e37fc30252285a70619ffc1b10ede4758250a"}, + {file = "pydantic-1.10.6-cp39-cp39-win_amd64.whl", hash = "sha256:189318051c3d57821f7233ecc94708767dd67687a614a4e8f92b4a020d4ffd06"}, + {file = "pydantic-1.10.6-py3-none-any.whl", hash = "sha256:acc6783751ac9c9bc4680379edd6d286468a1dc8d7d9906cd6f1186ed682b2b0"}, + {file = "pydantic-1.10.6.tar.gz", hash = "sha256:cf95adb0d1671fc38d8c43dd921ad5814a735e7d9b4d9e437c088002863854fd"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] [[package]] name = "pyflakes" -version = "2.4.0" +version = "3.0.1" description = "passive checker of Python programs" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.6" +files = [ + {file = "pyflakes-3.0.1-py2.py3-none-any.whl", hash = "sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf"}, + {file = "pyflakes-3.0.1.tar.gz", hash = "sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd"}, +] [[package]] name = "pygments" -version = "2.12.0" +version = "2.13.0" description = "Pygments is a syntax highlighting package written in Python." 
category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"}, + {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"}, +] + +[package.extras] +plugins = ["importlib-metadata"] [[package]] name = "pyopenssl" -version = "22.0.0" +version = "22.1.0" description = "Python wrapper module around the OpenSSL library" category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "pyOpenSSL-22.1.0-py3-none-any.whl", hash = "sha256:b28437c9773bb6c6958628cf9c3bebe585de661dba6f63df17111966363dd15e"}, + {file = "pyOpenSSL-22.1.0.tar.gz", hash = "sha256:7a83b7b272dd595222d672f5ce29aa030f1fb837630ef229f62e72e395ce8968"}, +] [package.dependencies] -cryptography = ">=35.0" +cryptography = ">=38.0.0,<39" [package.extras] -docs = ["sphinx", "sphinx-rtd-theme"] +docs = ["sphinx (!=5.2.0,!=5.2.0.post0)", "sphinx-rtd-theme"] test = ["flaky", "pretend", "pytest (>=3.0.1)"] -[[package]] -name = "pyparsing" -version = "3.0.9" -description = "pyparsing module - Classes and methods to define and execute parsing grammars" -category = "dev" -optional = false -python-versions = ">=3.6.8" - -[package.extras] -diagrams = ["jinja2", "railroad-diagrams"] - [[package]] name = "pytest" -version = "7.1.2" +version = "7.2.0" description = "pytest: simple powerful testing with Python" category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = "pytest-7.2.0-py3-none-any.whl", hash = "sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71"}, + {file = "pytest-7.2.0.tar.gz", hash = "sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59"}, +] [package.dependencies] -atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} attrs = ">=19.2.0" colorama = {version = "*", markers = "sys_platform == \"win32\""} 
+exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" pluggy = ">=0.12,<2.0" -py = ">=1.8.2" -tomli = ">=1.0.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] [[package]] name = "pytest-cov" -version = "3.0.0" +version = "4.0.0" description = "Pytest plugin for measuring coverage." category = "dev" optional = false python-versions = ">=3.6" +files = [ + {file = "pytest-cov-4.0.0.tar.gz", hash = "sha256:996b79efde6433cdbd0088872dbc5fb3ed7fe1578b68cdbba634f14bb8dd0470"}, + {file = "pytest_cov-4.0.0-py3-none-any.whl", hash = "sha256:2feb1b751d66a8bd934e5edfa2e961d11309dc37b73b0eabe73b5945fee20f6b"}, +] [package.dependencies] coverage = {version = ">=5.2.1", extras = ["toml"]} @@ -732,62 +1406,111 @@ pytest = ">=4.6" [package.extras] testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] +[[package]] +name = "pytest-xdist" +version = "3.2.0" +description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-xdist-3.2.0.tar.gz", hash = "sha256:fa10f95a2564cd91652f2d132725183c3b590d9fdcdec09d3677386ecf4c1ce9"}, + {file = "pytest_xdist-3.2.0-py3-none-any.whl", hash = "sha256:336098e3bbd8193276867cc87db8b22903c3927665dff9d1ac8684c02f597b68"}, +] + +[package.dependencies] +execnet = ">=1.1" +pytest = ">=6.2.0" + +[package.extras] +psutil = ["psutil (>=3.0)"] +setproctitle = ["setproctitle"] +testing = ["filelock"] + [[package]] name = "pywin32" -version = "304" +version = "305" description = "Python for Window Extensions" category = "main" optional = false python-versions = "*" +files = [ + {file = "pywin32-305-cp310-cp310-win32.whl", hash = 
"sha256:421f6cd86e84bbb696d54563c48014b12a23ef95a14e0bdba526be756d89f116"}, + {file = "pywin32-305-cp310-cp310-win_amd64.whl", hash = "sha256:73e819c6bed89f44ff1d690498c0a811948f73777e5f97c494c152b850fad478"}, + {file = "pywin32-305-cp310-cp310-win_arm64.whl", hash = "sha256:742eb905ce2187133a29365b428e6c3b9001d79accdc30aa8969afba1d8470f4"}, + {file = "pywin32-305-cp311-cp311-win32.whl", hash = "sha256:19ca459cd2e66c0e2cc9a09d589f71d827f26d47fe4a9d09175f6aa0256b51c2"}, + {file = "pywin32-305-cp311-cp311-win_amd64.whl", hash = "sha256:326f42ab4cfff56e77e3e595aeaf6c216712bbdd91e464d167c6434b28d65990"}, + {file = "pywin32-305-cp311-cp311-win_arm64.whl", hash = "sha256:4ecd404b2c6eceaca52f8b2e3e91b2187850a1ad3f8b746d0796a98b4cea04db"}, + {file = "pywin32-305-cp36-cp36m-win32.whl", hash = "sha256:48d8b1659284f3c17b68587af047d110d8c44837736b8932c034091683e05863"}, + {file = "pywin32-305-cp36-cp36m-win_amd64.whl", hash = "sha256:13362cc5aa93c2beaf489c9c9017c793722aeb56d3e5166dadd5ef82da021fe1"}, + {file = "pywin32-305-cp37-cp37m-win32.whl", hash = "sha256:a55db448124d1c1484df22fa8bbcbc45c64da5e6eae74ab095b9ea62e6d00496"}, + {file = "pywin32-305-cp37-cp37m-win_amd64.whl", hash = "sha256:109f98980bfb27e78f4df8a51a8198e10b0f347257d1e265bb1a32993d0c973d"}, + {file = "pywin32-305-cp38-cp38-win32.whl", hash = "sha256:9dd98384da775afa009bc04863426cb30596fd78c6f8e4e2e5bbf4edf8029504"}, + {file = "pywin32-305-cp38-cp38-win_amd64.whl", hash = "sha256:56d7a9c6e1a6835f521788f53b5af7912090674bb84ef5611663ee1595860fc7"}, + {file = "pywin32-305-cp39-cp39-win32.whl", hash = "sha256:9d968c677ac4d5cbdaa62fd3014ab241718e619d8e36ef8e11fb930515a1e918"}, + {file = "pywin32-305-cp39-cp39-win_amd64.whl", hash = "sha256:50768c6b7c3f0b38b7fb14dd4104da93ebced5f1a50dc0e834594bff6fbe1271"}, +] [[package]] name = "requests" -version = "2.27.1" +version = "2.28.1" description = "Python HTTP for Humans." 
category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = ">=3.7, <4" +files = [ + {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, + {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, +] [package.dependencies] certifi = ">=2017.4.17" -charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""} -idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""} +charset-normalizer = ">=2,<3" +idna = ">=2.5,<4" urllib3 = ">=1.21.1,<1.27" [package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<5)"] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "rocksdb" version = "0.9.1" -description = "" +description = "Python bindings for RocksDB" category = "main" optional = false python-versions = "*" +files = [] develop = false +[package.dependencies] +setuptools = ">=25" + [package.extras] -doc = ["sphinx", "sphinx-rtd-theme"] +doc = ["sphinx", "sphinx_rtd_theme"] test = ["pytest"] [package.source] type = "git" url = "https://github.com/hathornetwork/python-rocksdb.git" -reference = "master" -resolved_reference = "947f68a80d97c4a5621ee681ae01602ebd883f3a" +reference = "HEAD" +resolved_reference = "72edcfbd22f4a3ca816f94096d3ec181da41031e" [[package]] name = "sentry-sdk" -version = "1.5.12" +version = "1.11.1" description = "Python client for Sentry (https://sentry.io)" category = "main" optional = true python-versions = "*" +files = [ + {file = "sentry-sdk-1.11.1.tar.gz", hash = "sha256:675f6279b6bb1fea09fd61751061f9a90dca3b5929ef631dd50dc8b3aeb245e9"}, + {file = "sentry_sdk-1.11.1-py2.py3-none-any.whl", hash = "sha256:8b4ff696c0bdcceb3f70bbb87a57ba84fd3168b1332d493fcd16c137f709578c"}, +] [package.dependencies] certifi 
= "*" -urllib3 = ">=1.10.0" +urllib3 = {version = ">=1.26.11", markers = "python_version >= \"3.6\""} [package.extras] aiohttp = ["aiohttp (>=3.5)"] @@ -797,14 +1520,17 @@ celery = ["celery (>=3)"] chalice = ["chalice (>=1.16.0)"] django = ["django (>=1.8)"] falcon = ["falcon (>=1.4)"] +fastapi = ["fastapi (>=0.79.0)"] flask = ["blinker (>=1.1)", "flask (>=0.11)"] httpx = ["httpx (>=0.16.0)"] pure-eval = ["asttokens", "executing", "pure-eval"] +pymongo = ["pymongo (>=3.1)"] pyspark = ["pyspark (>=2.4.4)"] quart = ["blinker (>=1.1)", "quart (>=0.16.1)"] rq = ["rq (>=0.6)"] sanic = ["sanic (>=0.8)"] sqlalchemy = ["sqlalchemy (>=1.2)"] +starlette = ["starlette (>=0.19.1)"] tornado = ["tornado (>=5)"] [[package]] @@ -814,6 +1540,10 @@ description = "Service identity verification for pyOpenSSL & cryptography." category = "main" optional = false python-versions = "*" +files = [ + {file = "service-identity-21.1.0.tar.gz", hash = "sha256:6e6c6086ca271dc11b033d17c3a8bea9f24ebff920c587da090afc9519419d34"}, + {file = "service_identity-21.1.0-py2.py3-none-any.whl", hash = "sha256:f0b0caac3d40627c3c04d7a51b6e06721857a0e10a8775f2d1d7e72901b3a7db"}, +] [package.dependencies] attrs = ">=19.1.0" @@ -830,22 +1560,100 @@ tests = ["coverage[toml] (>=5.0.2)", "pytest"] [[package]] name = "setproctitle" -version = "1.2.3" +version = "1.3.2" description = "A Python module to customize the process title" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" +files = [ + {file = "setproctitle-1.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:288943dec88e178bb2fd868adf491197cc0fc8b6810416b1c6775e686bab87fe"}, + {file = "setproctitle-1.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:630f6fe5e24a619ccf970c78e084319ee8be5be253ecc9b5b216b0f474f5ef18"}, + {file = "setproctitle-1.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c877691b90026670e5a70adfbcc735460a9f4c274d35ec5e8a43ce3f8443005"}, + {file = 
"setproctitle-1.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7a55fe05f15c10e8c705038777656fe45e3bd676d49ad9ac8370b75c66dd7cd7"}, + {file = "setproctitle-1.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ab45146c71ca6592c9cc8b354a2cc9cc4843c33efcbe1d245d7d37ce9696552d"}, + {file = "setproctitle-1.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e00c9d5c541a2713ba0e657e0303bf96ddddc412ef4761676adc35df35d7c246"}, + {file = "setproctitle-1.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:265ecbe2c6eafe82e104f994ddd7c811520acdd0647b73f65c24f51374cf9494"}, + {file = "setproctitle-1.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c2c46200656280a064073447ebd363937562debef329482fd7e570c8d498f806"}, + {file = "setproctitle-1.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:fa2f50678f04fda7a75d0fe5dd02bbdd3b13cbe6ed4cf626e4472a7ccf47ae94"}, + {file = "setproctitle-1.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7f2719a398e1a2c01c2a63bf30377a34d0b6ef61946ab9cf4d550733af8f1ef1"}, + {file = "setproctitle-1.3.2-cp310-cp310-win32.whl", hash = "sha256:e425be62524dc0c593985da794ee73eb8a17abb10fe692ee43bb39e201d7a099"}, + {file = "setproctitle-1.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:e85e50b9c67854f89635a86247412f3ad66b132a4d8534ac017547197c88f27d"}, + {file = "setproctitle-1.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2a97d51c17d438cf5be284775a322d57b7ca9505bb7e118c28b1824ecaf8aeaa"}, + {file = "setproctitle-1.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:587c7d6780109fbd8a627758063d08ab0421377c0853780e5c356873cdf0f077"}, + {file = "setproctitle-1.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d17c8bd073cbf8d141993db45145a70b307385b69171d6b54bcf23e5d644de"}, + {file = 
"setproctitle-1.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e932089c35a396dc31a5a1fc49889dd559548d14cb2237adae260382a090382e"}, + {file = "setproctitle-1.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8e4f8f12258a8739c565292a551c3db62cca4ed4f6b6126664e2381acb4931bf"}, + {file = "setproctitle-1.3.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:570d255fd99c7f14d8f91363c3ea96bd54f8742275796bca67e1414aeca7d8c3"}, + {file = "setproctitle-1.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a8e0881568c5e6beff91ef73c0ec8ac2a9d3ecc9edd6bd83c31ca34f770910c4"}, + {file = "setproctitle-1.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4bba3be4c1fabf170595b71f3af46c6d482fbe7d9e0563999b49999a31876f77"}, + {file = "setproctitle-1.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:37ece938110cab2bb3957e3910af8152ca15f2b6efdf4f2612e3f6b7e5459b80"}, + {file = "setproctitle-1.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db684d6bbb735a80bcbc3737856385b55d53f8a44ce9b46e9a5682c5133a9bf7"}, + {file = "setproctitle-1.3.2-cp311-cp311-win32.whl", hash = "sha256:ca58cd260ea02759238d994cfae844fc8b1e206c684beb8f38877dcab8451dfc"}, + {file = "setproctitle-1.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:88486e6cce2a18a033013d17b30a594f1c5cb42520c49c19e6ade40b864bb7ff"}, + {file = "setproctitle-1.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:92c626edc66169a1b09e9541b9c0c9f10488447d8a2b1d87c8f0672e771bc927"}, + {file = "setproctitle-1.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:710e16fa3bade3b026907e4a5e841124983620046166f355bbb84be364bf2a02"}, + {file = "setproctitle-1.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f29b75e86260b0ab59adb12661ef9f113d2f93a59951373eb6d68a852b13e83"}, + {file = 
"setproctitle-1.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c8d9650154afaa86a44ff195b7b10d683c73509d085339d174e394a22cccbb9"}, + {file = "setproctitle-1.3.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0452282258dfcc01697026a8841258dd2057c4438b43914b611bccbcd048f10"}, + {file = "setproctitle-1.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:e49ae693306d7624015f31cb3e82708916759d592c2e5f72a35c8f4cc8aef258"}, + {file = "setproctitle-1.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1ff863a20d1ff6ba2c24e22436a3daa3cd80be1dfb26891aae73f61b54b04aca"}, + {file = "setproctitle-1.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:55ce1e9925ce1765865442ede9dca0ba9bde10593fcd570b1f0fa25d3ec6b31c"}, + {file = "setproctitle-1.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7fe9df7aeb8c64db6c34fc3b13271a363475d77bc157d3f00275a53910cb1989"}, + {file = "setproctitle-1.3.2-cp37-cp37m-win32.whl", hash = "sha256:e5c50e164cd2459bc5137c15288a9ef57160fd5cbf293265ea3c45efe7870865"}, + {file = "setproctitle-1.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:a499fff50387c1520c085a07578a000123f519e5f3eee61dd68e1d301659651f"}, + {file = "setproctitle-1.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5b932c3041aa924163f4aab970c2f0e6b4d9d773f4d50326e0ea1cd69240e5c5"}, + {file = "setproctitle-1.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f4bfc89bd33ebb8e4c0e9846a09b1f5a4a86f5cb7a317e75cc42fee1131b4f4f"}, + {file = "setproctitle-1.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fcd3cf4286a60fdc95451d8d14e0389a6b4f5cebe02c7f2609325eb016535963"}, + {file = "setproctitle-1.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5fb4f769c02f63fac90989711a3fee83919f47ae9afd4758ced5d86596318c65"}, + {file = 
"setproctitle-1.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5194b4969f82ea842a4f6af2f82cd16ebdc3f1771fb2771796e6add9835c1973"}, + {file = "setproctitle-1.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f0cde41857a644b7353a0060b5f94f7ba7cf593ebde5a1094da1be581ac9a31"}, + {file = "setproctitle-1.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9124bedd8006b0e04d4e8a71a0945da9b67e7a4ab88fdad7b1440dc5b6122c42"}, + {file = "setproctitle-1.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c8a09d570b39517de10ee5b718730e171251ce63bbb890c430c725c8c53d4484"}, + {file = "setproctitle-1.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:8ff3c8cb26afaed25e8bca7b9dd0c1e36de71f35a3a0706b5c0d5172587a3827"}, + {file = "setproctitle-1.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:589be87172b238f839e19f146b9ea47c71e413e951ef0dc6db4218ddacf3c202"}, + {file = "setproctitle-1.3.2-cp38-cp38-win32.whl", hash = "sha256:4749a2b0c9ac52f864d13cee94546606f92b981b50e46226f7f830a56a9dc8e1"}, + {file = "setproctitle-1.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:e43f315c68aa61cbdef522a2272c5a5b9b8fd03c301d3167b5e1343ef50c676c"}, + {file = "setproctitle-1.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:de3a540cd1817ede31f530d20e6a4935bbc1b145fd8f8cf393903b1e02f1ae76"}, + {file = "setproctitle-1.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4058564195b975ddc3f0462375c533cce310ccdd41b80ac9aed641c296c3eff4"}, + {file = "setproctitle-1.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c5d5dad7c28bdd1ec4187d818e43796f58a845aa892bb4481587010dc4d362b"}, + {file = "setproctitle-1.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ffc61a388a5834a97953d6444a2888c24a05f2e333f9ed49f977a87bb1ad4761"}, + {file = 
"setproctitle-1.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fa1a0fbee72b47dc339c87c890d3c03a72ea65c061ade3204f285582f2da30f"}, + {file = "setproctitle-1.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe8a988c7220c002c45347430993830666e55bc350179d91fcee0feafe64e1d4"}, + {file = "setproctitle-1.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bae283e85fc084b18ffeb92e061ff7ac5af9e183c9d1345c93e178c3e5069cbe"}, + {file = "setproctitle-1.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:fed18e44711c5af4b681c2b3b18f85e6f0f1b2370a28854c645d636d5305ccd8"}, + {file = "setproctitle-1.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:b34baef93bfb20a8ecb930e395ccd2ae3268050d8cf4fe187de5e2bd806fd796"}, + {file = "setproctitle-1.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7f0bed90a216ef28b9d227d8d73e28a8c9b88c0f48a082d13ab3fa83c581488f"}, + {file = "setproctitle-1.3.2-cp39-cp39-win32.whl", hash = "sha256:4d8938249a7cea45ab7e1e48b77685d0f2bab1ebfa9dde23e94ab97968996a7c"}, + {file = "setproctitle-1.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:a47d97a75fd2d10c37410b180f67a5835cb1d8fdea2648fd7f359d4277f180b9"}, + {file = "setproctitle-1.3.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:dad42e676c5261eb50fdb16bdf3e2771cf8f99a79ef69ba88729aeb3472d8575"}, + {file = "setproctitle-1.3.2-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c91b9bc8985d00239f7dc08a49927a7ca1ca8a6af2c3890feec3ed9665b6f91e"}, + {file = "setproctitle-1.3.2-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8579a43eafd246e285eb3a5b939e7158073d5087aacdd2308f23200eac2458b"}, + {file = "setproctitle-1.3.2-pp37-pypy37_pp73-win_amd64.whl", hash = 
"sha256:2fbd8187948284293f43533c150cd69a0e4192c83c377da837dbcd29f6b83084"}, + {file = "setproctitle-1.3.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:faec934cfe5fd6ac1151c02e67156c3f526e82f96b24d550b5d51efa4a5527c6"}, + {file = "setproctitle-1.3.2-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1aafc91cbdacc9e5fe712c52077369168e6b6c346f3a9d51bf600b53eae56bb"}, + {file = "setproctitle-1.3.2-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b617f12c9be61e8f4b2857be4a4319754756845dbbbd9c3718f468bbb1e17bcb"}, + {file = "setproctitle-1.3.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b2c9cb2705fc84cb8798f1ba74194f4c080aaef19d9dae843591c09b97678e98"}, + {file = "setproctitle-1.3.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a149a5f7f2c5a065d4e63cb0d7a4b6d3b66e6e80f12e3f8827c4f63974cbf122"}, + {file = "setproctitle-1.3.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e3ac25bfc4a0f29d2409650c7532d5ddfdbf29f16f8a256fc31c47d0dc05172"}, + {file = "setproctitle-1.3.2-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65d884e22037b23fa25b2baf1a3316602ed5c5971eb3e9d771a38c3a69ce6e13"}, + {file = "setproctitle-1.3.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7aa0aac1711fadffc1d51e9d00a3bea61f68443d6ac0241a224e4d622489d665"}, + {file = "setproctitle-1.3.2.tar.gz", hash = "sha256:b9fb97907c830d260fa0658ed58afd48a86b2b88aac521135c352ff7fd3477fd"}, +] [package.extras] test = ["pytest"] [[package]] name = "setuptools" -version = "65.5.1" +version = "65.6.3" description = "Easily download, build, install, upgrade, and uninstall Python packages" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "setuptools-65.6.3-py3-none-any.whl", hash = 
"sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54"}, + {file = "setuptools-65.6.3.tar.gz", hash = "sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75"}, +] [package.extras] docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] @@ -859,6 +1667,10 @@ description = "Python 2 and 3 compatibility utilities" category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] [[package]] name = "sortedcontainers" @@ -867,18 +1679,26 @@ description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" category = "main" optional = false python-versions = "*" +files = [ + {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, + {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, +] [[package]] name = "stack-data" -version = "0.3.0" +version = "0.6.2" description = "Extract data from python stack frames and tracebacks for informative displays" category = "main" optional = false python-versions = "*" +files = [ + {file = "stack_data-0.6.2-py3-none-any.whl", hash = "sha256:cbb2a53eb64e5785878201a97ed7c7b94883f48b87bfb0bbe8b623c74679e4a8"}, + {file = "stack_data-0.6.2.tar.gz", hash = "sha256:32d2dd0376772d01b6cb9fc996f3c8b57a357089dec328ed4b6553d037eaf815"}, +] [package.dependencies] -asttokens = "*" -executing = "*" +asttokens = ">=2.1.0" +executing = ">=1.2.0" 
pure-eval = "*" [package.extras] @@ -886,16 +1706,21 @@ tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] [[package]] name = "structlog" -version = "21.5.0" +version = "22.3.0" description = "Structured Logging for Python" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" +files = [ + {file = "structlog-22.3.0-py3-none-any.whl", hash = "sha256:b403f344f902b220648fa9f286a23c0cc5439a5844d271fec40562dbadbc70ad"}, + {file = "structlog-22.3.0.tar.gz", hash = "sha256:e7509391f215e4afb88b1b80fa3ea074be57a5a17d794bd436a5c949da023333"}, +] [package.extras] -dev = ["cogapp", "coverage[toml]", "freezegun (>=0.2.8)", "furo", "pre-commit", "pretend", "pytest (>=6.0)", "pytest-asyncio", "rich", "simplejson", "sphinx", "sphinx-notfound-page", "sphinxcontrib-mermaid", "tomli", "twisted"] -docs = ["furo", "sphinx", "sphinx-notfound-page", "sphinxcontrib-mermaid", "twisted"] -tests = ["coverage[toml]", "freezegun (>=0.2.8)", "pretend", "pytest (>=6.0)", "pytest-asyncio", "simplejson"] +dev = ["structlog[docs,tests,typing]"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-mermaid", "twisted"] +tests = ["coverage[toml]", "freezegun (>=0.2.8)", "pretend", "pytest (>=6.0)", "pytest-asyncio (>=0.17)", "simplejson"] +typing = ["mypy", "rich", "twisted"] [[package]] name = "structlog-sentry" @@ -904,6 +1729,10 @@ description = "Sentry integration for structlog" category = "main" optional = true python-versions = ">=3.6,<4.0" +files = [ + {file = "structlog-sentry-1.4.0.tar.gz", hash = "sha256:5fc6cfab71b858d71433e68cc5af79a396e72015003931507e340b3687ebb0a8"}, + {file = "structlog_sentry-1.4.0-py3-none-any.whl", hash = "sha256:04627538e13bb0719a8806353279d40c1d1afb3eb2053817820754b9a08814a7"}, +] [package.dependencies] sentry-sdk = "*" @@ -915,25 +1744,40 @@ description = "A lil' TOML parser" category = "dev" optional = false python-versions = ">=3.7" +files = [ + {file = 
"tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] [[package]] name = "traitlets" -version = "5.2.0" +version = "5.7.0" description = "Traitlets Python configuration system" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "traitlets-5.7.0-py3-none-any.whl", hash = "sha256:61832ea7b7f910f5745e27e9bb269a181fd15af76027d99560299209d5b17c94"}, + {file = "traitlets-5.7.0.tar.gz", hash = "sha256:bd0fca5c890a09bf66b33cce67ca14156b080429bc39c7ef26b075a4bd4f9fc3"}, +] [package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] +lint = ["black (>=22.6.0)", "mdformat (>0.7)", "ruff (>=0.0.156)"] test = ["pre-commit", "pytest"] +typing = ["mypy (>=0.990)"] [[package]] name = "twisted" -version = "22.4.0" +version = "22.10.0" description = "An asynchronous networking framework written in Python" category = "main" optional = false -python-versions = ">=3.6.7" +python-versions = ">=3.7.1" +files = [ + {file = "Twisted-22.10.0-py3-none-any.whl", hash = "sha256:86c55f712cc5ab6f6d64e02503352464f0400f66d4f079096d744080afcccbd0"}, + {file = "Twisted-22.10.0.tar.gz", hash = "sha256:32acbd40a94f5f46e7b42c109bfae2b302250945561783a8b7a059048f2d4d31"}, +] [package.dependencies] attrs = ">=19.2.0" @@ -946,20 +1790,21 @@ typing-extensions = ">=3.6.5" "zope.interface" = ">=4.4.2" [package.extras] -all-non-platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyopenssl (>=16.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)"] +all-non-platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography 
(>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.0,<7.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)"] conch = ["appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "cryptography (>=2.6)", "pyasn1"] conch-nacl = ["PyNaCl", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "cryptography (>=2.6)", "pyasn1"] contextvars = ["contextvars (>=2.4,<3)"] -dev = ["coverage (>=6b1,<7)", "pydoctor (>=21.9.0,<21.10.0)", "pyflakes (>=2.2,<3.0)", "python-subunit (>=1.4,<2.0)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "sphinx (>=4.1.2,<6)", "sphinx-rtd-theme (>=0.5,<1.0)", "towncrier (>=19.2,<20.0)", "twistedchecker (>=0.7,<1.0)"] -dev-release = ["pydoctor (>=21.9.0,<21.10.0)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "sphinx (>=4.1.2,<6)", "sphinx-rtd-theme (>=0.5,<1.0)", "towncrier (>=19.2,<20.0)"] +dev = ["coverage (>=6b1,<7)", "pydoctor (>=22.9.0,<22.10.0)", "pyflakes (>=2.2,<3.0)", "python-subunit (>=1.4,<2.0)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "sphinx (>=5.0,<6)", "sphinx-rtd-theme (>=1.0,<2.0)", "towncrier (>=22.8,<23.0)", "twistedchecker (>=0.7,<1.0)"] +dev-release = ["pydoctor (>=22.9.0,<22.10.0)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "sphinx (>=5.0,<6)", "sphinx-rtd-theme (>=1.0,<2.0)", "towncrier (>=22.8,<23.0)"] +gtk-platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.0,<7.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pygobject", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)"] http2 = ["h2 (>=3.0,<5.0)", "priority (>=1.1.0,<2.0)"] -macos-platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "idna 
(>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyobjc-core", "pyobjc-framework-CFNetwork", "pyobjc-framework-Cocoa", "pyopenssl (>=16.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)"] -mypy = ["PyHamcrest (>=1.9.0)", "PyNaCl", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "coverage (>=6b1,<7)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "idna (>=2.4)", "mypy (==0.930)", "mypy-zope (==0.3.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pydoctor (>=21.9.0,<21.10.0)", "pyflakes (>=2.2,<3.0)", "pyopenssl (>=16.0.0)", "pyserial (>=3.0)", "python-subunit (>=1.4,<2.0)", "pywin32 (!=226)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "service-identity (>=18.1.0)", "sphinx (>=4.1.2,<6)", "sphinx-rtd-theme (>=0.5,<1.0)", "towncrier (>=19.2,<20.0)", "twistedchecker (>=0.7,<1.0)", "types-pyOpenSSL", "types-setuptools"] -osx-platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyobjc-core", "pyobjc-framework-CFNetwork", "pyobjc-framework-Cocoa", "pyopenssl (>=16.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)"] +macos-platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.0,<7.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyobjc-core", "pyobjc-framework-CFNetwork", "pyobjc-framework-Cocoa", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)"] +mypy = ["PyHamcrest (>=1.9.0)", "PyNaCl", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "coverage (>=6b1,<7)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis 
(>=6.0,<7.0)", "idna (>=2.4)", "mypy (==0.930)", "mypy-zope (==0.3.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pydoctor (>=22.9.0,<22.10.0)", "pyflakes (>=2.2,<3.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "python-subunit (>=1.4,<2.0)", "pywin32 (!=226)", "readthedocs-sphinx-ext (>=2.1,<3.0)", "service-identity (>=18.1.0)", "sphinx (>=5.0,<6)", "sphinx-rtd-theme (>=1.0,<2.0)", "towncrier (>=22.8,<23.0)", "twistedchecker (>=0.7,<1.0)", "types-pyOpenSSL", "types-setuptools"] +osx-platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.0,<7.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyobjc-core", "pyobjc-framework-CFNetwork", "pyobjc-framework-Cocoa", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)"] serial = ["pyserial (>=3.0)", "pywin32 (!=226)"] -test = ["PyHamcrest (>=1.9.0)", "cython-test-exception-raiser (>=1.0.2,<2)"] -tls = ["idna (>=2.4)", "pyopenssl (>=16.0.0)", "service-identity (>=18.1.0)"] -windows-platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyopenssl (>=16.0.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)"] +test = ["PyHamcrest (>=1.9.0)", "cython-test-exception-raiser (>=1.0.2,<2)", "hypothesis (>=6.0,<7.0)"] +tls = ["idna (>=2.4)", "pyopenssl (>=21.0.0)", "service-identity (>=18.1.0)"] +windows-platform = ["PyHamcrest (>=1.9.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.0.0)", "contextvars (>=2.4,<3)", "cryptography (>=2.6)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.0,<7.0)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "pyasn1", "pyopenssl (>=21.0.0)", "pyserial 
(>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)"] [[package]] name = "twisted-iocpsupport" @@ -968,6 +1813,20 @@ description = "An extension for use in the twisted I/O Completion Ports reactor. category = "main" optional = false python-versions = "*" +files = [ + {file = "twisted-iocpsupport-1.0.2.tar.gz", hash = "sha256:72068b206ee809c9c596b57b5287259ea41ddb4774d86725b19f35bf56aa32a9"}, + {file = "twisted_iocpsupport-1.0.2-cp310-cp310-win32.whl", hash = "sha256:985c06a33f5c0dae92c71a036d1ea63872ee86a21dd9b01e1f287486f15524b4"}, + {file = "twisted_iocpsupport-1.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:81b3abe3527b367da0220482820cb12a16c661672b7bcfcde328902890d63323"}, + {file = "twisted_iocpsupport-1.0.2-cp36-cp36m-win32.whl", hash = "sha256:9dbb8823b49f06d4de52721b47de4d3b3026064ef4788ce62b1a21c57c3fff6f"}, + {file = "twisted_iocpsupport-1.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:b9fed67cf0f951573f06d560ac2f10f2a4bbdc6697770113a2fc396ea2cb2565"}, + {file = "twisted_iocpsupport-1.0.2-cp37-cp37m-win32.whl", hash = "sha256:b76b4eed9b27fd63ddb0877efdd2d15835fdcb6baa745cb85b66e5d016ac2878"}, + {file = "twisted_iocpsupport-1.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:851b3735ca7e8102e661872390e3bce88f8901bece95c25a0c8bb9ecb8a23d32"}, + {file = "twisted_iocpsupport-1.0.2-cp38-cp38-win32.whl", hash = "sha256:bf4133139d77fc706d8f572e6b7d82871d82ec7ef25d685c2351bdacfb701415"}, + {file = "twisted_iocpsupport-1.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:306becd6e22ab6e8e4f36b6bdafd9c92e867c98a5ce517b27fdd27760ee7ae41"}, + {file = "twisted_iocpsupport-1.0.2-cp39-cp39-win32.whl", hash = "sha256:3c61742cb0bc6c1ac117a7e5f422c129832f0c295af49e01d8a6066df8cfc04d"}, + {file = "twisted_iocpsupport-1.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:b435857b9efcbfc12f8c326ef0383f26416272260455bbca2cd8d8eca470c546"}, + {file = "twisted_iocpsupport-1.0.2-pp37-pypy37_pp73-win_amd64.whl", hash = 
"sha256:7d972cfa8439bdcb35a7be78b7ef86d73b34b808c74be56dfa785c8a93b851bf"}, +] [[package]] name = "txaio" @@ -976,6 +1835,10 @@ description = "Compatibility API between asyncio/Twisted/Trollius" category = "main" optional = false python-versions = ">=3.6" +files = [ + {file = "txaio-22.2.1-py2.py3-none-any.whl", hash = "sha256:41223af4a9d5726e645a8ee82480f413e5e300dd257db94bc38ae12ea48fb2e5"}, + {file = "txaio-22.2.1.tar.gz", hash = "sha256:2e4582b70f04b2345908254684a984206c0d9b50e3074a24a4c55aba21d24d01"}, +] [package.extras] all = ["twisted (>=20.3.0)", "zope.interface (>=5.2.0)"] @@ -984,61 +1847,85 @@ twisted = ["twisted (>=20.3.0)", "zope.interface (>=5.2.0)"] [[package]] name = "types-cryptography" -version = "3.3.21" +version = "3.3.23.2" description = "Typing stubs for cryptography" category = "dev" optional = false python-versions = "*" +files = [ + {file = "types-cryptography-3.3.23.2.tar.gz", hash = "sha256:09cc53f273dd4d8c29fa7ad11fefd9b734126d467960162397bc5e3e604dea75"}, + {file = "types_cryptography-3.3.23.2-py3-none-any.whl", hash = "sha256:b965d548f148f8e87f353ccf2b7bd92719fdf6c845ff7cedf2abb393a0643e4f"}, +] [[package]] name = "types-pyopenssl" -version = "22.0.3" +version = "22.1.0.2" description = "Typing stubs for pyOpenSSL" category = "dev" optional = false python-versions = "*" +files = [ + {file = "types-pyOpenSSL-22.1.0.2.tar.gz", hash = "sha256:7a350e29e55bc3ee4571f996b4b1c18c4e4098947db45f7485b016eaa35b44bc"}, + {file = "types_pyOpenSSL-22.1.0.2-py3-none-any.whl", hash = "sha256:54606a6afb203eb261e0fca9b7f75fa6c24d5ff71e13903c162ffb951c2c64c6"}, +] [package.dependencies] types-cryptography = "*" [[package]] name = "types-requests" -version = "2.27.25" +version = "2.28.11.4" description = "Typing stubs for requests" category = "dev" optional = false python-versions = "*" +files = [ + {file = "types-requests-2.28.11.4.tar.gz", hash = "sha256:d4f342b0df432262e9e326d17638eeae96a5881e78e7a6aae46d33870d73952e"}, + {file = 
"types_requests-2.28.11.4-py3-none-any.whl", hash = "sha256:bdb1f9811e53d0642c8347b09137363eb25e1a516819e190da187c29595a1df3"}, +] [package.dependencies] types-urllib3 = "<1.27" [[package]] name = "types-urllib3" -version = "1.26.14" +version = "1.26.25.4" description = "Typing stubs for urllib3" category = "dev" optional = false python-versions = "*" +files = [ + {file = "types-urllib3-1.26.25.4.tar.gz", hash = "sha256:eec5556428eec862b1ac578fb69aab3877995a99ffec9e5a12cf7fbd0cc9daee"}, + {file = "types_urllib3-1.26.25.4-py3-none-any.whl", hash = "sha256:ed6b9e8a8be488796f72306889a06a3fc3cb1aa99af02ab8afb50144d7317e49"}, +] [[package]] name = "typing-extensions" -version = "4.2.0" +version = "4.4.0" description = "Backported and Experimental Type Hints for Python 3.7+" category = "main" optional = false python-versions = ">=3.7" +files = [ + {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"}, + {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"}, +] [[package]] name = "urllib3" -version = "1.26.9" +version = "1.26.13" description = "HTTP library with thread-safe connection pooling, file post, and more." 
category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "urllib3-1.26.13-py2.py3-none-any.whl", hash = "sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc"}, + {file = "urllib3-1.26.13.tar.gz", hash = "sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8"}, +] [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] @@ -1048,26 +1935,110 @@ description = "Measures the displayed width of unicode strings in a terminal" category = "main" optional = false python-versions = "*" +files = [ + {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, + {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, +] [[package]] name = "yarl" -version = "1.7.2" +version = "1.8.2" description = "Yet another URL library" category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" +files = [ + {file = "yarl-1.8.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:bb81f753c815f6b8e2ddd2eef3c855cf7da193b82396ac013c661aaa6cc6b0a5"}, + {file = "yarl-1.8.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:47d49ac96156f0928f002e2424299b2c91d9db73e08c4cd6742923a086f1c863"}, + {file = "yarl-1.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3fc056e35fa6fba63248d93ff6e672c096f95f7836938241ebc8260e062832fe"}, + {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:58a3c13d1c3005dbbac5c9f0d3210b60220a65a999b1833aa46bd6677c69b08e"}, + {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10b08293cda921157f1e7c2790999d903b3fd28cd5c208cf8826b3b508026996"}, + {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de986979bbd87272fe557e0a8fcb66fd40ae2ddfe28a8b1ce4eae22681728fef"}, + {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c4fcfa71e2c6a3cb568cf81aadc12768b9995323186a10827beccf5fa23d4f8"}, + {file = "yarl-1.8.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae4d7ff1049f36accde9e1ef7301912a751e5bae0a9d142459646114c70ecba6"}, + {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bf071f797aec5b96abfc735ab97da9fd8f8768b43ce2abd85356a3127909d146"}, + {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:74dece2bfc60f0f70907c34b857ee98f2c6dd0f75185db133770cd67300d505f"}, + {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:df60a94d332158b444301c7f569659c926168e4d4aad2cfbf4bce0e8fb8be826"}, + {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:63243b21c6e28ec2375f932a10ce7eda65139b5b854c0f6b82ed945ba526bff3"}, + {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cfa2bbca929aa742b5084fd4663dd4b87c191c844326fcb21c3afd2d11497f80"}, + {file = "yarl-1.8.2-cp310-cp310-win32.whl", hash = "sha256:b05df9ea7496df11b710081bd90ecc3a3db6adb4fee36f6a411e7bc91a18aa42"}, + {file = "yarl-1.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:24ad1d10c9db1953291f56b5fe76203977f1ed05f82d09ec97acb623a7976574"}, + {file = "yarl-1.8.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2a1fca9588f360036242f379bfea2b8b44cae2721859b1c56d033adfd5893634"}, + {file = "yarl-1.8.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:f37db05c6051eff17bc832914fe46869f8849de5b92dc4a3466cd63095d23dfd"}, + {file = "yarl-1.8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:77e913b846a6b9c5f767b14dc1e759e5aff05502fe73079f6f4176359d832581"}, + {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0978f29222e649c351b173da2b9b4665ad1feb8d1daa9d971eb90df08702668a"}, + {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:388a45dc77198b2460eac0aca1efd6a7c09e976ee768b0d5109173e521a19daf"}, + {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2305517e332a862ef75be8fad3606ea10108662bc6fe08509d5ca99503ac2aee"}, + {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42430ff511571940d51e75cf42f1e4dbdded477e71c1b7a17f4da76c1da8ea76"}, + {file = "yarl-1.8.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3150078118f62371375e1e69b13b48288e44f6691c1069340081c3fd12c94d5b"}, + {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c15163b6125db87c8f53c98baa5e785782078fbd2dbeaa04c6141935eb6dab7a"}, + {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4d04acba75c72e6eb90745447d69f84e6c9056390f7a9724605ca9c56b4afcc6"}, + {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e7fd20d6576c10306dea2d6a5765f46f0ac5d6f53436217913e952d19237efc4"}, + {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:75c16b2a900b3536dfc7014905a128a2bea8fb01f9ee26d2d7d8db0a08e7cb2c"}, + {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6d88056a04860a98341a0cf53e950e3ac9f4e51d1b6f61a53b0609df342cc8b2"}, + {file = "yarl-1.8.2-cp311-cp311-win32.whl", hash = "sha256:fb742dcdd5eec9f26b61224c23baea46c9055cf16f62475e11b9b15dfd5c117b"}, + {file = "yarl-1.8.2-cp311-cp311-win_amd64.whl", hash = 
"sha256:8c46d3d89902c393a1d1e243ac847e0442d0196bbd81aecc94fcebbc2fd5857c"}, + {file = "yarl-1.8.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ceff9722e0df2e0a9e8a79c610842004fa54e5b309fe6d218e47cd52f791d7ef"}, + {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f6b4aca43b602ba0f1459de647af954769919c4714706be36af670a5f44c9c1"}, + {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1684a9bd9077e922300ecd48003ddae7a7474e0412bea38d4631443a91d61077"}, + {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ebb78745273e51b9832ef90c0898501006670d6e059f2cdb0e999494eb1450c2"}, + {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3adeef150d528ded2a8e734ebf9ae2e658f4c49bf413f5f157a470e17a4a2e89"}, + {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57a7c87927a468e5a1dc60c17caf9597161d66457a34273ab1760219953f7f4c"}, + {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:efff27bd8cbe1f9bd127e7894942ccc20c857aa8b5a0327874f30201e5ce83d0"}, + {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a783cd344113cb88c5ff7ca32f1f16532a6f2142185147822187913eb989f739"}, + {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:705227dccbe96ab02c7cb2c43e1228e2826e7ead880bb19ec94ef279e9555b5b"}, + {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:34c09b43bd538bf6c4b891ecce94b6fa4f1f10663a8d4ca589a079a5018f6ed7"}, + {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a48f4f7fea9a51098b02209d90297ac324241bf37ff6be6d2b0149ab2bd51b37"}, + {file = "yarl-1.8.2-cp37-cp37m-win32.whl", hash = "sha256:0414fd91ce0b763d4eadb4456795b307a71524dbacd015c657bb2a39db2eab89"}, + {file = "yarl-1.8.2-cp37-cp37m-win_amd64.whl", hash = 
"sha256:d881d152ae0007809c2c02e22aa534e702f12071e6b285e90945aa3c376463c5"}, + {file = "yarl-1.8.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5df5e3d04101c1e5c3b1d69710b0574171cc02fddc4b23d1b2813e75f35a30b1"}, + {file = "yarl-1.8.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7a66c506ec67eb3159eea5096acd05f5e788ceec7b96087d30c7d2865a243918"}, + {file = "yarl-1.8.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2b4fa2606adf392051d990c3b3877d768771adc3faf2e117b9de7eb977741229"}, + {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e21fb44e1eff06dd6ef971d4bdc611807d6bd3691223d9c01a18cec3677939e"}, + {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93202666046d9edadfe9f2e7bf5e0782ea0d497b6d63da322e541665d65a044e"}, + {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fc77086ce244453e074e445104f0ecb27530d6fd3a46698e33f6c38951d5a0f1"}, + {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dd68a92cab699a233641f5929a40f02a4ede8c009068ca8aa1fe87b8c20ae3"}, + {file = "yarl-1.8.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b372aad2b5f81db66ee7ec085cbad72c4da660d994e8e590c997e9b01e44901"}, + {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e6f3515aafe0209dd17fb9bdd3b4e892963370b3de781f53e1746a521fb39fc0"}, + {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:dfef7350ee369197106805e193d420b75467b6cceac646ea5ed3049fcc950a05"}, + {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:728be34f70a190566d20aa13dc1f01dc44b6aa74580e10a3fb159691bc76909d"}, + {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:ff205b58dc2929191f68162633d5e10e8044398d7a45265f90a0f1d51f85f72c"}, + {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:baf211dcad448a87a0d9047dc8282d7de59473ade7d7fdf22150b1d23859f946"}, + {file = "yarl-1.8.2-cp38-cp38-win32.whl", hash = "sha256:272b4f1599f1b621bf2aabe4e5b54f39a933971f4e7c9aa311d6d7dc06965165"}, + {file = "yarl-1.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:326dd1d3caf910cd26a26ccbfb84c03b608ba32499b5d6eeb09252c920bcbe4f"}, + {file = "yarl-1.8.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f8ca8ad414c85bbc50f49c0a106f951613dfa5f948ab69c10ce9b128d368baf8"}, + {file = "yarl-1.8.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:418857f837347e8aaef682679f41e36c24250097f9e2f315d39bae3a99a34cbf"}, + {file = "yarl-1.8.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ae0eec05ab49e91a78700761777f284c2df119376e391db42c38ab46fd662b77"}, + {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:009a028127e0a1755c38b03244c0bea9d5565630db9c4cf9572496e947137a87"}, + {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3edac5d74bb3209c418805bda77f973117836e1de7c000e9755e572c1f7850d0"}, + {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da65c3f263729e47351261351b8679c6429151ef9649bba08ef2528ff2c423b2"}, + {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ef8fb25e52663a1c85d608f6dd72e19bd390e2ecaf29c17fb08f730226e3a08"}, + {file = "yarl-1.8.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcd7bb1e5c45274af9a1dd7494d3c52b2be5e6bd8d7e49c612705fd45420b12d"}, + {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44ceac0450e648de86da8e42674f9b7077d763ea80c8ceb9d1c3e41f0f0a9951"}, + {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:97209cc91189b48e7cfe777237c04af8e7cc51eb369004e061809bcdf4e55220"}, + {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:48dd18adcf98ea9cd721a25313aef49d70d413a999d7d89df44f469edfb38a06"}, + {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e59399dda559688461762800d7fb34d9e8a6a7444fd76ec33220a926c8be1516"}, + {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d617c241c8c3ad5c4e78a08429fa49e4b04bedfc507b34b4d8dceb83b4af3588"}, + {file = "yarl-1.8.2-cp39-cp39-win32.whl", hash = "sha256:cb6d48d80a41f68de41212f3dfd1a9d9898d7841c8f7ce6696cf2fd9cb57ef83"}, + {file = "yarl-1.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:6604711362f2dbf7160df21c416f81fac0de6dbcf0b5445a2ef25478ecc4c778"}, + {file = "yarl-1.8.2.tar.gz", hash = "sha256:49d43402c6e3013ad0978602bf6bf5328535c48d192304b91b97a3c6790b1562"}, +] [package.dependencies] idna = ">=2.0" multidict = ">=4.0" [[package]] -name = "zope.event" +name = "zope-event" version = "4.5.0" description = "Very basic event publishing system" category = "dev" optional = false python-versions = "*" +files = [ + {file = "zope.event-4.5.0-py2.py3-none-any.whl", hash = "sha256:2666401939cdaa5f4e0c08cf7f20c9b21423b95e88f4675b1443973bdb080c42"}, + {file = "zope.event-4.5.0.tar.gz", hash = "sha256:5e76517f5b9b119acf37ca8819781db6c16ea433f7e2062c4afc2b6fbedb1330"}, +] [package.dependencies] setuptools = "*" @@ -1077,12 +2048,50 @@ docs = ["Sphinx"] test = ["zope.testrunner"] [[package]] -name = "zope.interface" -version = "5.4.0" +name = "zope-interface" +version = "5.5.2" description = "Interfaces for Python" category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "zope.interface-5.5.2-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:a2ad597c8c9e038a5912ac3cf166f82926feff2f6e0dabdab956768de0a258f5"}, + {file = "zope.interface-5.5.2-cp27-cp27m-win_amd64.whl", hash = "sha256:65c3c06afee96c654e590e046c4a24559e65b0a87dbff256cd4bd6f77e1a33f9"}, + {file = "zope.interface-5.5.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:d514c269d1f9f5cd05ddfed15298d6c418129f3f064765295659798349c43e6f"}, + {file = "zope.interface-5.5.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5334e2ef60d3d9439c08baedaf8b84dc9bb9522d0dacbc10572ef5609ef8db6d"}, + {file = "zope.interface-5.5.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc26c8d44472e035d59d6f1177eb712888447f5799743da9c398b0339ed90b1b"}, + {file = "zope.interface-5.5.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:17ebf6e0b1d07ed009738016abf0d0a0f80388e009d0ac6e0ead26fc162b3b9c"}, + {file = "zope.interface-5.5.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f98d4bd7bbb15ca701d19b93263cc5edfd480c3475d163f137385f49e5b3a3a7"}, + {file = "zope.interface-5.5.2-cp310-cp310-win_amd64.whl", hash = "sha256:696f3d5493eae7359887da55c2afa05acc3db5fc625c49529e84bd9992313296"}, + {file = "zope.interface-5.5.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7579960be23d1fddecb53898035a0d112ac858c3554018ce615cefc03024e46d"}, + {file = "zope.interface-5.5.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:765d703096ca47aa5d93044bf701b00bbce4d903a95b41fff7c3796e747b1f1d"}, + {file = "zope.interface-5.5.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e945de62917acbf853ab968d8916290548df18dd62c739d862f359ecd25842a6"}, + {file = "zope.interface-5.5.2-cp311-cp311-win_amd64.whl", hash = "sha256:655796a906fa3ca67273011c9805c1e1baa047781fca80feeb710328cdbed87f"}, + {file = "zope.interface-5.5.2-cp35-cp35m-win_amd64.whl", hash = "sha256:0fb497c6b088818e3395e302e426850f8236d8d9f4ef5b2836feae812a8f699c"}, + {file = "zope.interface-5.5.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:008b0b65c05993bb08912f644d140530e775cf1c62a072bf9340c2249e613c32"}, + {file = "zope.interface-5.5.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:404d1e284eda9e233c90128697c71acffd55e183d70628aa0bbb0e7a3084ed8b"}, + {file = "zope.interface-5.5.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:3218ab1a7748327e08ef83cca63eea7cf20ea7e2ebcb2522072896e5e2fceedf"}, + {file = "zope.interface-5.5.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d169ccd0756c15bbb2f1acc012f5aab279dffc334d733ca0d9362c5beaebe88e"}, + {file = "zope.interface-5.5.2-cp36-cp36m-win_amd64.whl", hash = "sha256:e1574980b48c8c74f83578d1e77e701f8439a5d93f36a5a0af31337467c08fcf"}, + {file = "zope.interface-5.5.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:0217a9615531c83aeedb12e126611b1b1a3175013bbafe57c702ce40000eb9a0"}, + {file = "zope.interface-5.5.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:311196634bb9333aa06f00fc94f59d3a9fddd2305c2c425d86e406ddc6f2260d"}, + {file = "zope.interface-5.5.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6373d7eb813a143cb7795d3e42bd8ed857c82a90571567e681e1b3841a390d16"}, + {file = "zope.interface-5.5.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:959697ef2757406bff71467a09d940ca364e724c534efbf3786e86eee8591452"}, + {file = "zope.interface-5.5.2-cp37-cp37m-win_amd64.whl", hash = "sha256:dbaeb9cf0ea0b3bc4b36fae54a016933d64c6d52a94810a63c00f440ecb37dd7"}, + {file = "zope.interface-5.5.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:604cdba8f1983d0ab78edc29aa71c8df0ada06fb147cea436dc37093a0100a4e"}, + {file = "zope.interface-5.5.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e74a578172525c20d7223eac5f8ad187f10940dac06e40113d62f14f3adb1e8f"}, + {file = "zope.interface-5.5.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:f0980d44b8aded808bec5059018d64692f0127f10510eca71f2f0ace8fb11188"}, + {file = "zope.interface-5.5.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6e972493cdfe4ad0411fd9abfab7d4d800a7317a93928217f1a5de2bb0f0d87a"}, + {file = "zope.interface-5.5.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9d783213fab61832dbb10d385a319cb0e45451088abd45f95b5bb88ed0acca1a"}, + {file = "zope.interface-5.5.2-cp38-cp38-win_amd64.whl", hash = "sha256:a16025df73d24795a0bde05504911d306307c24a64187752685ff6ea23897cb0"}, + {file = "zope.interface-5.5.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:40f4065745e2c2fa0dff0e7ccd7c166a8ac9748974f960cd39f63d2c19f9231f"}, + {file = "zope.interface-5.5.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8a2ffadefd0e7206adc86e492ccc60395f7edb5680adedf17a7ee4205c530df4"}, + {file = "zope.interface-5.5.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d692374b578360d36568dd05efb8a5a67ab6d1878c29c582e37ddba80e66c396"}, + {file = "zope.interface-5.5.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4087e253bd3bbbc3e615ecd0b6dd03c4e6a1e46d152d3be6d2ad08fbad742dcc"}, + {file = "zope.interface-5.5.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fb68d212efd057596dee9e6582daded9f8ef776538afdf5feceb3059df2d2e7b"}, + {file = "zope.interface-5.5.2-cp39-cp39-win_amd64.whl", hash = "sha256:7e66f60b0067a10dd289b29dceabd3d0e6d68be1504fc9d0bc209cf07f56d189"}, + {file = "zope.interface-5.5.2.tar.gz", hash = "sha256:bfee1f3ff62143819499e348f5b8a7f3aa0259f9aca5e0ddae7391d059dce671"}, +] [package.dependencies] setuptools = "*" @@ -1093,12 +2102,16 @@ test = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [[package]] 
-name = "zope.schema" -version = "6.2.0" +name = "zope-schema" +version = "6.2.1" description = "zope.interface extension for defining data schemas" category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "zope.schema-6.2.1-py2.py3-none-any.whl", hash = "sha256:843c6fce13886333f707246f135a832f4408555ca9650f07ed6e4d429302f349"}, + {file = "zope.schema-6.2.1.tar.gz", hash = "sha256:e3b33c8bc8ba9d85a56713ab8f0a3c2615d54a8085f1e415ce0c8dfb5e540be6"}, +] [package.dependencies] setuptools = "*" @@ -1113,957 +2126,6 @@ test = ["zope.i18nmessageid", "zope.testing", "zope.testrunner"] sentry = ["sentry-sdk", "structlog-sentry"] [metadata] -lock-version = "1.1" -python-versions = ">=3.8,<4" -content-hash = "d75df6df025311f162b20a76bacd6bb59a3527257a97ec03067d0d67a7836b5a" - -[metadata.files] -aiohttp = [ - {file = "aiohttp-3.8.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1ed0b6477896559f17b9eaeb6d38e07f7f9ffe40b9f0f9627ae8b9926ae260a8"}, - {file = "aiohttp-3.8.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7dadf3c307b31e0e61689cbf9e06be7a867c563d5a63ce9dca578f956609abf8"}, - {file = "aiohttp-3.8.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a79004bb58748f31ae1cbe9fa891054baaa46fb106c2dc7af9f8e3304dc30316"}, - {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12de6add4038df8f72fac606dff775791a60f113a725c960f2bab01d8b8e6b15"}, - {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6f0d5f33feb5f69ddd57a4a4bd3d56c719a141080b445cbf18f238973c5c9923"}, - {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eaba923151d9deea315be1f3e2b31cc39a6d1d2f682f942905951f4e40200922"}, - {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = 
"sha256:099ebd2c37ac74cce10a3527d2b49af80243e2a4fa39e7bce41617fbc35fa3c1"}, - {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2e5d962cf7e1d426aa0e528a7e198658cdc8aa4fe87f781d039ad75dcd52c516"}, - {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:fa0ffcace9b3aa34d205d8130f7873fcfefcb6a4dd3dd705b0dab69af6712642"}, - {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:61bfc23df345d8c9716d03717c2ed5e27374e0fe6f659ea64edcd27b4b044cf7"}, - {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:31560d268ff62143e92423ef183680b9829b1b482c011713ae941997921eebc8"}, - {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:01d7bdb774a9acc838e6b8f1d114f45303841b89b95984cbb7d80ea41172a9e3"}, - {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:97ef77eb6b044134c0b3a96e16abcb05ecce892965a2124c566af0fd60f717e2"}, - {file = "aiohttp-3.8.1-cp310-cp310-win32.whl", hash = "sha256:c2aef4703f1f2ddc6df17519885dbfa3514929149d3ff900b73f45998f2532fa"}, - {file = "aiohttp-3.8.1-cp310-cp310-win_amd64.whl", hash = "sha256:713ac174a629d39b7c6a3aa757b337599798da4c1157114a314e4e391cd28e32"}, - {file = "aiohttp-3.8.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:473d93d4450880fe278696549f2e7aed8cd23708c3c1997981464475f32137db"}, - {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99b5eeae8e019e7aad8af8bb314fb908dd2e028b3cdaad87ec05095394cce632"}, - {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3af642b43ce56c24d063325dd2cf20ee012d2b9ba4c3c008755a301aaea720ad"}, - {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3630c3ef435c0a7c549ba170a0633a56e92629aeed0e707fec832dee313fb7a"}, - {file = 
"aiohttp-3.8.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4a4a4e30bf1edcad13fb0804300557aedd07a92cabc74382fdd0ba6ca2661091"}, - {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6f8b01295e26c68b3a1b90efb7a89029110d3a4139270b24fda961893216c440"}, - {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:a25fa703a527158aaf10dafd956f7d42ac6d30ec80e9a70846253dd13e2f067b"}, - {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:5bfde62d1d2641a1f5173b8c8c2d96ceb4854f54a44c23102e2ccc7e02f003ec"}, - {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:51467000f3647d519272392f484126aa716f747859794ac9924a7aafa86cd411"}, - {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:03a6d5349c9ee8f79ab3ff3694d6ce1cfc3ced1c9d36200cb8f08ba06bd3b782"}, - {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:102e487eeb82afac440581e5d7f8f44560b36cf0bdd11abc51a46c1cd88914d4"}, - {file = "aiohttp-3.8.1-cp36-cp36m-win32.whl", hash = "sha256:4aed991a28ea3ce320dc8ce655875e1e00a11bdd29fe9444dd4f88c30d558602"}, - {file = "aiohttp-3.8.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b0e20cddbd676ab8a64c774fefa0ad787cc506afd844de95da56060348021e96"}, - {file = "aiohttp-3.8.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:37951ad2f4a6df6506750a23f7cbabad24c73c65f23f72e95897bb2cecbae676"}, - {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c23b1ad869653bc818e972b7a3a79852d0e494e9ab7e1a701a3decc49c20d51"}, - {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:15b09b06dae900777833fe7fc4b4aa426556ce95847a3e8d7548e2d19e34edb8"}, - {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:477c3ea0ba410b2b56b7efb072c36fa91b1e6fc331761798fa3f28bb224830dd"}, - {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2f2f69dca064926e79997f45b2f34e202b320fd3782f17a91941f7eb85502ee2"}, - {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ef9612483cb35171d51d9173647eed5d0069eaa2ee812793a75373447d487aa4"}, - {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6d69f36d445c45cda7b3b26afef2fc34ef5ac0cdc75584a87ef307ee3c8c6d00"}, - {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:55c3d1072704d27401c92339144d199d9de7b52627f724a949fc7d5fc56d8b93"}, - {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:b9d00268fcb9f66fbcc7cd9fe423741d90c75ee029a1d15c09b22d23253c0a44"}, - {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:07b05cd3305e8a73112103c834e91cd27ce5b4bd07850c4b4dbd1877d3f45be7"}, - {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c34dc4958b232ef6188c4318cb7b2c2d80521c9a56c52449f8f93ab7bc2a8a1c"}, - {file = "aiohttp-3.8.1-cp37-cp37m-win32.whl", hash = "sha256:d2f9b69293c33aaa53d923032fe227feac867f81682f002ce33ffae978f0a9a9"}, - {file = "aiohttp-3.8.1-cp37-cp37m-win_amd64.whl", hash = "sha256:6ae828d3a003f03ae31915c31fa684b9890ea44c9c989056fea96e3d12a9fa17"}, - {file = "aiohttp-3.8.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0c7ebbbde809ff4e970824b2b6cb7e4222be6b95a296e46c03cf050878fc1785"}, - {file = "aiohttp-3.8.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8b7ef7cbd4fec9a1e811a5de813311ed4f7ac7d93e0fda233c9b3e1428f7dd7b"}, - {file = "aiohttp-3.8.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c3d6a4d0619e09dcd61021debf7059955c2004fa29f48788a3dfaf9c9901a7cd"}, - {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash 
= "sha256:718626a174e7e467f0558954f94af117b7d4695d48eb980146016afa4b580b2e"}, - {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:589c72667a5febd36f1315aa6e5f56dd4aa4862df295cb51c769d16142ddd7cd"}, - {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2ed076098b171573161eb146afcb9129b5ff63308960aeca4b676d9d3c35e700"}, - {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:086f92daf51a032d062ec5f58af5ca6a44d082c35299c96376a41cbb33034675"}, - {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:11691cf4dc5b94236ccc609b70fec991234e7ef8d4c02dd0c9668d1e486f5abf"}, - {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:31d1e1c0dbf19ebccbfd62eff461518dcb1e307b195e93bba60c965a4dcf1ba0"}, - {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:11a67c0d562e07067c4e86bffc1553f2cf5b664d6111c894671b2b8712f3aba5"}, - {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:bb01ba6b0d3f6c68b89fce7305080145d4877ad3acaed424bae4d4ee75faa950"}, - {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:44db35a9e15d6fe5c40d74952e803b1d96e964f683b5a78c3cc64eb177878155"}, - {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:844a9b460871ee0a0b0b68a64890dae9c415e513db0f4a7e3cab41a0f2fedf33"}, - {file = "aiohttp-3.8.1-cp38-cp38-win32.whl", hash = "sha256:7d08744e9bae2ca9c382581f7dce1273fe3c9bae94ff572c3626e8da5b193c6a"}, - {file = "aiohttp-3.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:04d48b8ce6ab3cf2097b1855e1505181bdd05586ca275f2505514a6e274e8e75"}, - {file = "aiohttp-3.8.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f5315a2eb0239185af1bddb1abf472d877fede3cc8d143c6cddad37678293237"}, - {file = 
"aiohttp-3.8.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a996d01ca39b8dfe77440f3cd600825d05841088fd6bc0144cc6c2ec14cc5f74"}, - {file = "aiohttp-3.8.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:13487abd2f761d4be7c8ff9080de2671e53fff69711d46de703c310c4c9317ca"}, - {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea302f34477fda3f85560a06d9ebdc7fa41e82420e892fc50b577e35fc6a50b2"}, - {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2f635ce61a89c5732537a7896b6319a8fcfa23ba09bec36e1b1ac0ab31270d2"}, - {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e999f2d0e12eea01caeecb17b653f3713d758f6dcc770417cf29ef08d3931421"}, - {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0770e2806a30e744b4e21c9d73b7bee18a1cfa3c47991ee2e5a65b887c49d5cf"}, - {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d15367ce87c8e9e09b0f989bfd72dc641bcd04ba091c68cd305312d00962addd"}, - {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6c7cefb4b0640703eb1069835c02486669312bf2f12b48a748e0a7756d0de33d"}, - {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:71927042ed6365a09a98a6377501af5c9f0a4d38083652bcd2281a06a5976724"}, - {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:28d490af82bc6b7ce53ff31337a18a10498303fe66f701ab65ef27e143c3b0ef"}, - {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:b6613280ccedf24354406caf785db748bebbddcf31408b20c0b48cb86af76866"}, - {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:81e3d8c34c623ca4e36c46524a3530e99c0bc95ed068fd6e9b55cb721d408fb2"}, - {file = "aiohttp-3.8.1-cp39-cp39-win32.whl", hash = 
"sha256:7187a76598bdb895af0adbd2fb7474d7f6025d170bc0a1130242da817ce9e7d1"}, - {file = "aiohttp-3.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:1c182cb873bc91b411e184dab7a2b664d4fea2743df0e4d57402f7f3fa644bac"}, - {file = "aiohttp-3.8.1.tar.gz", hash = "sha256:fc5471e1a54de15ef71c1bc6ebe80d4dc681ea600e68bfd1cbce40427f0b7578"}, -] -aiosignal = [ - {file = "aiosignal-1.2.0-py3-none-any.whl", hash = "sha256:26e62109036cd181df6e6ad646f91f0dcfd05fe16d0cb924138ff2ab75d64e3a"}, - {file = "aiosignal-1.2.0.tar.gz", hash = "sha256:78ed67db6c7b7ced4f98e495e572106d5c432a93e1ddd1bf475e1dc05f5b7df2"}, -] -appnope = [ - {file = "appnope-0.1.3-py2.py3-none-any.whl", hash = "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"}, - {file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, -] -asttokens = [ - {file = "asttokens-2.0.5-py2.py3-none-any.whl", hash = "sha256:0844691e88552595a6f4a4281a9f7f79b8dd45ca4ccea82e5e05b4bbdb76705c"}, - {file = "asttokens-2.0.5.tar.gz", hash = "sha256:9a54c114f02c7a9480d56550932546a3f1fe71d8a02f1bc7ccd0ee3ee35cf4d5"}, -] -async-timeout = [ - {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, - {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, -] -atomicwrites = [ - {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, - {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, -] -attrs = [ - {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, - {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, -] -autobahn = [ - {file = "autobahn-22.4.2.tar.gz", 
hash = "sha256:57b7acf228d50d83cf327372b889e2a168a869275b26e17917ed0b4cf4d823a6"}, -] -automat = [ - {file = "Automat-20.2.0-py2.py3-none-any.whl", hash = "sha256:b6feb6455337df834f6c9962d6ccf771515b7d939bca142b29c20c2376bc6111"}, - {file = "Automat-20.2.0.tar.gz", hash = "sha256:7979803c74610e11ef0c0d68a2942b152df52da55336e0c9d58daf1831cbdf33"}, -] -backcall = [ - {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, - {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, -] -base58 = [ - {file = "base58-2.1.1-py3-none-any.whl", hash = "sha256:11a36f4d3ce51dfc1043f3218591ac4eb1ceb172919cebe05b52a5bcc8d245c2"}, - {file = "base58-2.1.1.tar.gz", hash = "sha256:c5d0cb3f5b6e81e8e35da5754388ddcc6d0d14b6c6a132cb93d69ed580a7278c"}, -] -certifi = [ - {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, - {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, -] -cffi = [ - {file = "cffi-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962"}, - {file = "cffi-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:23cfe892bd5dd8941608f93348c0737e369e51c100d03718f108bf1add7bd6d0"}, - {file = "cffi-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:41d45de54cd277a7878919867c0f08b0cf817605e4eb94093e7516505d3c8d14"}, - {file = "cffi-1.15.0-cp27-cp27m-win32.whl", hash = "sha256:4a306fa632e8f0928956a41fa8e1d6243c71e7eb59ffbd165fc0b41e316b2474"}, - {file = "cffi-1.15.0-cp27-cp27m-win_amd64.whl", hash = "sha256:e7022a66d9b55e93e1a845d8c9eba2a1bebd4966cd8bfc25d9cd07d515b33fa6"}, - {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:14cd121ea63ecdae71efa69c15c5543a4b5fbcd0bbe2aad864baca0063cecf27"}, - {file = 
"cffi-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d4d692a89c5cf08a8557fdeb329b82e7bf609aadfaed6c0d79f5a449a3c7c023"}, - {file = "cffi-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0104fb5ae2391d46a4cb082abdd5c69ea4eab79d8d44eaaf79f1b1fd806ee4c2"}, - {file = "cffi-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:91ec59c33514b7c7559a6acda53bbfe1b283949c34fe7440bcf917f96ac0723e"}, - {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f5c7150ad32ba43a07c4479f40241756145a1f03b43480e058cfd862bf5041c7"}, - {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:00c878c90cb53ccfaae6b8bc18ad05d2036553e6d9d1d9dbcf323bbe83854ca3"}, - {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abb9a20a72ac4e0fdb50dae135ba5e77880518e742077ced47eb1499e29a443c"}, - {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a5263e363c27b653a90078143adb3d076c1a748ec9ecc78ea2fb916f9b861962"}, - {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f54a64f8b0c8ff0b64d18aa76675262e1700f3995182267998c31ae974fbc382"}, - {file = "cffi-1.15.0-cp310-cp310-win32.whl", hash = "sha256:c21c9e3896c23007803a875460fb786118f0cdd4434359577ea25eb556e34c55"}, - {file = "cffi-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:5e069f72d497312b24fcc02073d70cb989045d1c91cbd53979366077959933e0"}, - {file = "cffi-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:64d4ec9f448dfe041705426000cc13e34e6e5bb13736e9fd62e34a0b0c41566e"}, - {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2756c88cbb94231c7a147402476be2c4df2f6078099a6f4a480d239a8817ae39"}, - {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b96a311ac60a3f6be21d2572e46ce67f09abcf4d09344c49274eb9e0bf345fc"}, - {file 
= "cffi-1.15.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75e4024375654472cc27e91cbe9eaa08567f7fbdf822638be2814ce059f58032"}, - {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:59888172256cac5629e60e72e86598027aca6bf01fa2465bdb676d37636573e8"}, - {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:27c219baf94952ae9d50ec19651a687b826792055353d07648a5695413e0c605"}, - {file = "cffi-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:4958391dbd6249d7ad855b9ca88fae690783a6be9e86df65865058ed81fc860e"}, - {file = "cffi-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:f6f824dc3bce0edab5f427efcfb1d63ee75b6fcb7282900ccaf925be84efb0fc"}, - {file = "cffi-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:06c48159c1abed75c2e721b1715c379fa3200c7784271b3c46df01383b593636"}, - {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c2051981a968d7de9dd2d7b87bcb9c939c74a34626a6e2f8181455dd49ed69e4"}, - {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fd8a250edc26254fe5b33be00402e6d287f562b6a5b2152dec302fa15bb3e997"}, - {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91d77d2a782be4274da750752bb1650a97bfd8f291022b379bb8e01c66b4e96b"}, - {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:45db3a33139e9c8f7c09234b5784a5e33d31fd6907800b316decad50af323ff2"}, - {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:263cc3d821c4ab2213cbe8cd8b355a7f72a8324577dc865ef98487c1aeee2bc7"}, - {file = "cffi-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:17771976e82e9f94976180f76468546834d22a7cc404b17c22df2a2c81db0c66"}, - {file = "cffi-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:3415c89f9204ee60cd09b235810be700e993e343a408693e80ce7f6a40108029"}, - {file = 
"cffi-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4238e6dab5d6a8ba812de994bbb0a79bddbdf80994e4ce802b6f6f3142fcc880"}, - {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0808014eb713677ec1292301ea4c81ad277b6cdf2fdd90fd540af98c0b101d20"}, - {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:57e9ac9ccc3101fac9d6014fba037473e4358ef4e89f8e181f8951a2c0162024"}, - {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b6c2ea03845c9f501ed1313e78de148cd3f6cad741a75d43a29b43da27f2e1e"}, - {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10dffb601ccfb65262a27233ac273d552ddc4d8ae1bf93b21c94b8511bffe728"}, - {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:786902fb9ba7433aae840e0ed609f45c7bcd4e225ebb9c753aa39725bb3e6ad6"}, - {file = "cffi-1.15.0-cp38-cp38-win32.whl", hash = "sha256:da5db4e883f1ce37f55c667e5c0de439df76ac4cb55964655906306918e7363c"}, - {file = "cffi-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:181dee03b1170ff1969489acf1c26533710231c58f95534e3edac87fff06c443"}, - {file = "cffi-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:45e8636704eacc432a206ac7345a5d3d2c62d95a507ec70d62f23cd91770482a"}, - {file = "cffi-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:31fb708d9d7c3f49a60f04cf5b119aeefe5644daba1cd2a0fe389b674fd1de37"}, - {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6dc2737a3674b3e344847c8686cf29e500584ccad76204efea14f451d4cc669a"}, - {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74fdfdbfdc48d3f47148976f49fab3251e550a8720bebc99bf1483f5bfb5db3e"}, - {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffaa5c925128e29efbde7301d8ecaf35c8c60ffbcd6a1ffd3a552177c8e5e796"}, - {file = 
"cffi-1.15.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f7d084648d77af029acb79a0ff49a0ad7e9d09057a9bf46596dac9514dc07df"}, - {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef1f279350da2c586a69d32fc8733092fd32cc8ac95139a00377841f59a3f8d8"}, - {file = "cffi-1.15.0-cp39-cp39-win32.whl", hash = "sha256:2a23af14f408d53d5e6cd4e3d9a24ff9e05906ad574822a10563efcef137979a"}, - {file = "cffi-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:3773c4d81e6e818df2efbc7dd77325ca0dcb688116050fb2b3011218eda36139"}, - {file = "cffi-1.15.0.tar.gz", hash = "sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954"}, -] -charset-normalizer = [ - {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"}, - {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"}, -] -colorama = [ - {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, - {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, -] -configargparse = [ - {file = "ConfigArgParse-1.5.3-py3-none-any.whl", hash = "sha256:18f6535a2db9f6e02bd5626cc7455eac3e96b9ab3d969d366f9aafd5c5c00fe7"}, - {file = "ConfigArgParse-1.5.3.tar.gz", hash = "sha256:1b0b3cbf664ab59dada57123c81eff3d9737e0d11d8cf79e3d6eb10823f1739f"}, -] -constantly = [ - {file = "constantly-15.1.0-py2.py3-none-any.whl", hash = "sha256:dd2fa9d6b1a51a83f0d7dd76293d734046aa176e384bf6e33b7e44880eb37c5d"}, - {file = "constantly-15.1.0.tar.gz", hash = "sha256:586372eb92059873e29eba4f9dec8381541b4d3834660707faf8ba59146dfc35"}, -] -coverage = [ - {file = "coverage-6.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e7b4da9bafad21ea45a714d3ea6f3e1679099e420c8741c74905b92ee9bfa7cc"}, - {file = 
"coverage-6.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fde17bc42e0716c94bf19d92e4c9f5a00c5feb401f5bc01101fdf2a8b7cacf60"}, - {file = "coverage-6.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdbb0d89923c80dbd435b9cf8bba0ff55585a3cdb28cbec65f376c041472c60d"}, - {file = "coverage-6.4.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:67f9346aeebea54e845d29b487eb38ec95f2ecf3558a3cffb26ee3f0dcc3e760"}, - {file = "coverage-6.4.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42c499c14efd858b98c4e03595bf914089b98400d30789511577aa44607a1b74"}, - {file = "coverage-6.4.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:c35cca192ba700979d20ac43024a82b9b32a60da2f983bec6c0f5b84aead635c"}, - {file = "coverage-6.4.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9cc4f107009bca5a81caef2fca843dbec4215c05e917a59dec0c8db5cff1d2aa"}, - {file = "coverage-6.4.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5f444627b3664b80d078c05fe6a850dd711beeb90d26731f11d492dcbadb6973"}, - {file = "coverage-6.4.4-cp310-cp310-win32.whl", hash = "sha256:66e6df3ac4659a435677d8cd40e8eb1ac7219345d27c41145991ee9bf4b806a0"}, - {file = "coverage-6.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:35ef1f8d8a7a275aa7410d2f2c60fa6443f4a64fae9be671ec0696a68525b875"}, - {file = "coverage-6.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c1328d0c2f194ffda30a45f11058c02410e679456276bfa0bbe0b0ee87225fac"}, - {file = "coverage-6.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61b993f3998ee384935ee423c3d40894e93277f12482f6e777642a0141f55782"}, - {file = "coverage-6.4.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d5dd4b8e9cd0deb60e6fcc7b0647cbc1da6c33b9e786f9c79721fd303994832f"}, - {file = 
"coverage-6.4.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7026f5afe0d1a933685d8f2169d7c2d2e624f6255fb584ca99ccca8c0e966fd7"}, - {file = "coverage-6.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9c7b9b498eb0c0d48b4c2abc0e10c2d78912203f972e0e63e3c9dc21f15abdaa"}, - {file = "coverage-6.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ee2b2fb6eb4ace35805f434e0f6409444e1466a47f620d1d5763a22600f0f892"}, - {file = "coverage-6.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ab066f5ab67059d1f1000b5e1aa8bbd75b6ed1fc0014559aea41a9eb66fc2ce0"}, - {file = "coverage-6.4.4-cp311-cp311-win32.whl", hash = "sha256:9d6e1f3185cbfd3d91ac77ea065d85d5215d3dfa45b191d14ddfcd952fa53796"}, - {file = "coverage-6.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:e3d3c4cc38b2882f9a15bafd30aec079582b819bec1b8afdbde8f7797008108a"}, - {file = "coverage-6.4.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a095aa0a996ea08b10580908e88fbaf81ecf798e923bbe64fb98d1807db3d68a"}, - {file = "coverage-6.4.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef6f44409ab02e202b31a05dd6666797f9de2aa2b4b3534e9d450e42dea5e817"}, - {file = "coverage-6.4.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b7101938584d67e6f45f0015b60e24a95bf8dea19836b1709a80342e01b472f"}, - {file = "coverage-6.4.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14a32ec68d721c3d714d9b105c7acf8e0f8a4f4734c811eda75ff3718570b5e3"}, - {file = "coverage-6.4.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6a864733b22d3081749450466ac80698fe39c91cb6849b2ef8752fd7482011f3"}, - {file = "coverage-6.4.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:08002f9251f51afdcc5e3adf5d5d66bb490ae893d9e21359b085f0e03390a820"}, - {file = "coverage-6.4.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:a3b2752de32c455f2521a51bd3ffb53c5b3ae92736afde67ce83477f5c1dd928"}, - {file = "coverage-6.4.4-cp37-cp37m-win32.whl", hash = "sha256:f855b39e4f75abd0dfbcf74a82e84ae3fc260d523fcb3532786bcbbcb158322c"}, - {file = "coverage-6.4.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ee6ae6bbcac0786807295e9687169fba80cb0617852b2fa118a99667e8e6815d"}, - {file = "coverage-6.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:564cd0f5b5470094df06fab676c6d77547abfdcb09b6c29c8a97c41ad03b103c"}, - {file = "coverage-6.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cbbb0e4cd8ddcd5ef47641cfac97d8473ab6b132dd9a46bacb18872828031685"}, - {file = "coverage-6.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6113e4df2fa73b80f77663445be6d567913fb3b82a86ceb64e44ae0e4b695de1"}, - {file = "coverage-6.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8d032bfc562a52318ae05047a6eb801ff31ccee172dc0d2504614e911d8fa83e"}, - {file = "coverage-6.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e431e305a1f3126477abe9a184624a85308da8edf8486a863601d58419d26ffa"}, - {file = "coverage-6.4.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cf2afe83a53f77aec067033199797832617890e15bed42f4a1a93ea24794ae3e"}, - {file = "coverage-6.4.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:783bc7c4ee524039ca13b6d9b4186a67f8e63d91342c713e88c1865a38d0892a"}, - {file = "coverage-6.4.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ff934ced84054b9018665ca3967fc48e1ac99e811f6cc99ea65978e1d384454b"}, - {file = "coverage-6.4.4-cp38-cp38-win32.whl", hash = "sha256:e1fabd473566fce2cf18ea41171d92814e4ef1495e04471786cbc943b89a3781"}, - {file = "coverage-6.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:4179502f210ebed3ccfe2f78bf8e2d59e50b297b598b100d6c6e3341053066a2"}, - {file = "coverage-6.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:98c0b9e9b572893cdb0a00e66cf961a238f8d870d4e1dc8e679eb8bdc2eb1b86"}, - {file = "coverage-6.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fc600f6ec19b273da1d85817eda339fb46ce9eef3e89f220055d8696e0a06908"}, - {file = "coverage-6.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a98d6bf6d4ca5c07a600c7b4e0c5350cd483c85c736c522b786be90ea5bac4f"}, - {file = "coverage-6.4.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01778769097dbd705a24e221f42be885c544bb91251747a8a3efdec6eb4788f2"}, - {file = "coverage-6.4.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfa0b97eb904255e2ab24166071b27408f1f69c8fbda58e9c0972804851e0558"}, - {file = "coverage-6.4.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:fcbe3d9a53e013f8ab88734d7e517eb2cd06b7e689bedf22c0eb68db5e4a0a19"}, - {file = "coverage-6.4.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:15e38d853ee224e92ccc9a851457fb1e1f12d7a5df5ae44544ce7863691c7a0d"}, - {file = "coverage-6.4.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6913dddee2deff8ab2512639c5168c3e80b3ebb0f818fed22048ee46f735351a"}, - {file = "coverage-6.4.4-cp39-cp39-win32.whl", hash = "sha256:354df19fefd03b9a13132fa6643527ef7905712109d9c1c1903f2133d3a4e145"}, - {file = "coverage-6.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:1238b08f3576201ebf41f7c20bf59baa0d05da941b123c6656e42cdb668e9827"}, - {file = "coverage-6.4.4-pp36.pp37.pp38-none-any.whl", hash = "sha256:f67cf9f406cf0d2f08a3515ce2db5b82625a7257f88aad87904674def6ddaec1"}, - {file = "coverage-6.4.4.tar.gz", hash = "sha256:e16c45b726acb780e1e6f88b286d3c10b3914ab03438f32117c4aa52d7f30d58"}, -] -cryptography = [ - {file = "cryptography-38.0.3-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:984fe150f350a3c91e84de405fe49e688aa6092b3525f407a18b9646f6612320"}, - {file = "cryptography-38.0.3-cp36-abi3-macosx_10_10_x86_64.whl", 
hash = "sha256:ed7b00096790213e09eb11c97cc6e2b757f15f3d2f85833cd2d3ec3fe37c1722"}, - {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:bbf203f1a814007ce24bd4d51362991d5cb90ba0c177a9c08825f2cc304d871f"}, - {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:554bec92ee7d1e9d10ded2f7e92a5d70c1f74ba9524947c0ba0c850c7b011828"}, - {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1b52c9e5f8aa2b802d48bd693190341fae201ea51c7a167d69fc48b60e8a959"}, - {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:728f2694fa743a996d7784a6194da430f197d5c58e2f4e278612b359f455e4a2"}, - {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dfb4f4dd568de1b6af9f4cda334adf7d72cf5bc052516e1b2608b683375dd95c"}, - {file = "cryptography-38.0.3-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:5419a127426084933076132d317911e3c6eb77568a1ce23c3ac1e12d111e61e0"}, - {file = "cryptography-38.0.3-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:9b24bcff7853ed18a63cfb0c2b008936a9554af24af2fb146e16d8e1aed75748"}, - {file = "cryptography-38.0.3-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:25c1d1f19729fb09d42e06b4bf9895212292cb27bb50229f5aa64d039ab29146"}, - {file = "cryptography-38.0.3-cp36-abi3-win32.whl", hash = "sha256:7f836217000342d448e1c9a342e9163149e45d5b5eca76a30e84503a5a96cab0"}, - {file = "cryptography-38.0.3-cp36-abi3-win_amd64.whl", hash = "sha256:c46837ea467ed1efea562bbeb543994c2d1f6e800785bd5a2c98bc096f5cb220"}, - {file = "cryptography-38.0.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06fc3cc7b6f6cca87bd56ec80a580c88f1da5306f505876a71c8cfa7050257dd"}, - {file = "cryptography-38.0.3-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = 
"sha256:65535bc550b70bd6271984d9863a37741352b4aad6fb1b3344a54e6950249b55"}, - {file = "cryptography-38.0.3-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5e89468fbd2fcd733b5899333bc54d0d06c80e04cd23d8c6f3e0542358c6060b"}, - {file = "cryptography-38.0.3-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:6ab9516b85bebe7aa83f309bacc5f44a61eeb90d0b4ec125d2d003ce41932d36"}, - {file = "cryptography-38.0.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:068147f32fa662c81aebab95c74679b401b12b57494872886eb5c1139250ec5d"}, - {file = "cryptography-38.0.3-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:402852a0aea73833d982cabb6d0c3bb582c15483d29fb7085ef2c42bfa7e38d7"}, - {file = "cryptography-38.0.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b1b35d9d3a65542ed2e9d90115dfd16bbc027b3f07ee3304fc83580f26e43249"}, - {file = "cryptography-38.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:6addc3b6d593cd980989261dc1cce38263c76954d758c3c94de51f1e010c9a50"}, - {file = "cryptography-38.0.3-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:be243c7e2bfcf6cc4cb350c0d5cdf15ca6383bbcb2a8ef51d3c9411a9d4386f0"}, - {file = "cryptography-38.0.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78cf5eefac2b52c10398a42765bfa981ce2372cbc0457e6bf9658f41ec3c41d8"}, - {file = "cryptography-38.0.3-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:4e269dcd9b102c5a3d72be3c45d8ce20377b8076a43cbed6f660a1afe365e436"}, - {file = "cryptography-38.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8d41a46251bf0634e21fac50ffd643216ccecfaf3701a063257fe0b2be1b6548"}, - {file = "cryptography-38.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:785e4056b5a8b28f05a533fab69febf5004458e20dad7e2e13a3120d8ecec75a"}, - {file = "cryptography-38.0.3.tar.gz", hash = "sha256:bfbe6ee19615b07a98b1d2287d6a6073f734735b49ee45b11324d85efc4d5cbd"}, -] -cython = [ - {file = 
"Cython-0.29.28-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:75686c586e37b1fed0fe4a2c053474f96fc07da0063bbfc98023454540515d31"}, - {file = "Cython-0.29.28-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:16f2e74fcac223c53e298ecead62c353d3cffa107bea5d8232e4b2ba40781634"}, - {file = "Cython-0.29.28-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b6c77cc24861a33714e74212abfab4e54bf42e1ad602623f193b8e369389af2f"}, - {file = "Cython-0.29.28-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:59f4e86b415620a097cf0ec602adf5a7ee3cc33e8220567ded96566f753483f8"}, - {file = "Cython-0.29.28-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:31465dce7fd3f058d02afb98b13af962848cc607052388814428dc801cc26f57"}, - {file = "Cython-0.29.28-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:5658fa477e80d96c49d5ff011938dd4b62da9aa428f771b91f1a7c49af45aad8"}, - {file = "Cython-0.29.28-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:33b69ac9bbf2b93d8cae336cfe48889397a857e6ceeb5cef0b2f0b31b6c54f2b"}, - {file = "Cython-0.29.28-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9d39ee7ddef6856413f950b8959e852d83376d9db1c509505e3f4873df32aa70"}, - {file = "Cython-0.29.28-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c9848a423a14e8f51bd4bbf8e2ff37031764ce66bdc7c6bc06c70d4084eb23c7"}, - {file = "Cython-0.29.28-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:09448aadb818387160ca4d1e1b82dbb7001526b6d0bed7529c4e8ac12e3b6f4c"}, - {file = "Cython-0.29.28-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:341917bdb2c95bcf8322aacfe50bbe6b4794880b16fa8b2300330520e123a5e5"}, - {file = "Cython-0.29.28-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = 
"sha256:fdcef7abb09fd827691e3abe6fd42c6c34beaccfa0bc2df6074f0a49949df6a8"}, - {file = "Cython-0.29.28-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:43eca77169f855dd04be11921a585c8854a174f30bc925257e92bc7b9197fbd2"}, - {file = "Cython-0.29.28-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7962a78ceb80cdec21345fb5088e675060fa65982030d446069f2d675d30e3cd"}, - {file = "Cython-0.29.28-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:ed32c206e1d68056a34b21d2ec0cf0f23d338d6531476a68c73e21e20bd7bb63"}, - {file = "Cython-0.29.28-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:a0ed39c63ba52edd03a39ea9d6da6f5326aaee5d333c317feba543270a1b3af5"}, - {file = "Cython-0.29.28-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:ded4fd3da4dee2f4414c35214244e29befa7f6fede3e9be317e765169df2cbc7"}, - {file = "Cython-0.29.28-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e24bd94946ffa37f30fcb865f2340fb6d429a3c7bf87b47b22f7d22e0e68a15c"}, - {file = "Cython-0.29.28-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:076aa8da83383e2bed0ca5f92c13a7e76e684bc41fe8e438bbed735f5b1c2731"}, - {file = "Cython-0.29.28-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:004387d8b94c64681ee05660d6a234e125396097726cf2f419c0fa2ac38034d6"}, - {file = "Cython-0.29.28-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d6036f6a5a0c7fb1af88889872268b15bf20dd9cefe33a6602d79ba18b8db20f"}, - {file = "Cython-0.29.28-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:1612d7439590ba3b8de5f907bf0e54bd8e024eafb8c59261531a7988030c182d"}, - {file = "Cython-0.29.28-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:d7d7beb600d5dd551e9322e1393b74286f4a3d4aa387f7bfbaccc1495a98603b"}, - {file = 
"Cython-0.29.28-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:5e82f6b3dc2133b2e0e2c5c63d352d40a695e40cc7ed99f4cbe83334bcf9ab39"}, - {file = "Cython-0.29.28-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:49076747b731ed78acf203666c3b3c5d664754ea01ca4527f62f6d8675703688"}, - {file = "Cython-0.29.28-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9f2b7c86a73db0d8dbbd885fe67f04c7b787df37a3848b9867270d3484101fbd"}, - {file = "Cython-0.29.28-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a3b27812ac9e9737026bfbb1dd47434f3e84013f430bafe1c6cbaf1cd51b5518"}, - {file = "Cython-0.29.28-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0378a14d2580dcea234d7a2dc8d75f60c091105885096e6dd5b032be97542c16"}, - {file = "Cython-0.29.28-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:d7c98727397c2547a56aa0c3c98140f1873c69a0642edc9446c6c870d0d8a5b5"}, - {file = "Cython-0.29.28-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:6626f9691ce2093ccbcc9932f449efe3b6e1c893b556910881d177c61612e8ff"}, - {file = "Cython-0.29.28-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_24_i686.whl", hash = "sha256:e9cc6af0c9c477c5e175e807dce439509934efefc24ea2da9fced7fbc8170591"}, - {file = "Cython-0.29.28-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05edfa51c0ff31a8df3cb291b90ca93ab499686d023b9b81c216cd3509f73def"}, - {file = "Cython-0.29.28-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:4b3089255b6b1cc69e4b854626a41193e6acae5332263d24707976b3cb8ca644"}, - {file = "Cython-0.29.28-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:03b749e4f0bbf631cee472add2806d338a7d496f8383f6fb28cc5fdc34b7fdb8"}, - {file = "Cython-0.29.28-py2.py3-none-any.whl", hash = "sha256:26d8d0ededca42be50e0ac377c08408e18802b1391caa3aea045a72c1bff47ac"}, - 
{file = "Cython-0.29.28.tar.gz", hash = "sha256:d6fac2342802c30e51426828fe084ff4deb1b3387367cf98976bb2e64b6f8e45"}, -] -decorator = [ - {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, - {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, -] -executing = [ - {file = "executing-0.9.1-py2.py3-none-any.whl", hash = "sha256:4ce4d6082d99361c0231fc31ac1a0f56979363cc6819de0b1410784f99e49105"}, - {file = "executing-0.9.1.tar.gz", hash = "sha256:ea278e2cf90cbbacd24f1080dd1f0ac25b71b2e21f50ab439b7ba45dd3195587"}, -] -flake8 = [ - {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, - {file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"}, -] -flaky = [ - {file = "flaky-3.7.0-py2.py3-none-any.whl", hash = "sha256:d6eda73cab5ae7364504b7c44670f70abed9e75f77dd116352f662817592ec9c"}, - {file = "flaky-3.7.0.tar.gz", hash = "sha256:3ad100780721a1911f57a165809b7ea265a7863305acb66708220820caf8aa0d"}, -] -frozenlist = [ - {file = "frozenlist-1.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d2257aaba9660f78c7b1d8fea963b68f3feffb1a9d5d05a18401ca9eb3e8d0a3"}, - {file = "frozenlist-1.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4a44ebbf601d7bac77976d429e9bdb5a4614f9f4027777f9e54fd765196e9d3b"}, - {file = "frozenlist-1.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:45334234ec30fc4ea677f43171b18a27505bfb2dba9aca4398a62692c0ea8868"}, - {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47be22dc27ed933d55ee55845d34a3e4e9f6fee93039e7f8ebadb0c2f60d403f"}, - {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:03a7dd1bfce30216a3f51a84e6dd0e4a573d23ca50f0346634916ff105ba6e6b"}, - {file = 
"frozenlist-1.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:691ddf6dc50480ce49f68441f1d16a4c3325887453837036e0fb94736eae1e58"}, - {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bde99812f237f79eaf3f04ebffd74f6718bbd216101b35ac7955c2d47c17da02"}, - {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a202458d1298ced3768f5a7d44301e7c86defac162ace0ab7434c2e961166e8"}, - {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b9e3e9e365991f8cc5f5edc1fd65b58b41d0514a6a7ad95ef5c7f34eb49b3d3e"}, - {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:04cb491c4b1c051734d41ea2552fde292f5f3a9c911363f74f39c23659c4af78"}, - {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:436496321dad302b8b27ca955364a439ed1f0999311c393dccb243e451ff66aa"}, - {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:754728d65f1acc61e0f4df784456106e35afb7bf39cfe37227ab00436fb38676"}, - {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6eb275c6385dd72594758cbe96c07cdb9bd6becf84235f4a594bdf21e3596c9d"}, - {file = "frozenlist-1.3.0-cp310-cp310-win32.whl", hash = "sha256:e30b2f9683812eb30cf3f0a8e9f79f8d590a7999f731cf39f9105a7c4a39489d"}, - {file = "frozenlist-1.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:f7353ba3367473d1d616ee727945f439e027f0bb16ac1a750219a8344d1d5d3c"}, - {file = "frozenlist-1.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88aafd445a233dbbf8a65a62bc3249a0acd0d81ab18f6feb461cc5a938610d24"}, - {file = "frozenlist-1.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4406cfabef8f07b3b3af0f50f70938ec06d9f0fc26cbdeaab431cbc3ca3caeaa"}, - {file = 
"frozenlist-1.3.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8cf829bd2e2956066dd4de43fd8ec881d87842a06708c035b37ef632930505a2"}, - {file = "frozenlist-1.3.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:603b9091bd70fae7be28bdb8aa5c9990f4241aa33abb673390a7f7329296695f"}, - {file = "frozenlist-1.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25af28b560e0c76fa41f550eacb389905633e7ac02d6eb3c09017fa1c8cdfde1"}, - {file = "frozenlist-1.3.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94c7a8a9fc9383b52c410a2ec952521906d355d18fccc927fca52ab575ee8b93"}, - {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:65bc6e2fece04e2145ab6e3c47428d1bbc05aede61ae365b2c1bddd94906e478"}, - {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3f7c935c7b58b0d78c0beea0c7358e165f95f1fd8a7e98baa40d22a05b4a8141"}, - {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd89acd1b8bb4f31b47072615d72e7f53a948d302b7c1d1455e42622de180eae"}, - {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:6983a31698490825171be44ffbafeaa930ddf590d3f051e397143a5045513b01"}, - {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:adac9700675cf99e3615eb6a0eb5e9f5a4143c7d42c05cea2e7f71c27a3d0846"}, - {file = "frozenlist-1.3.0-cp37-cp37m-win32.whl", hash = "sha256:0c36e78b9509e97042ef869c0e1e6ef6429e55817c12d78245eb915e1cca7468"}, - {file = "frozenlist-1.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:57f4d3f03a18facacb2a6bcd21bccd011e3b75d463dc49f838fd699d074fabd1"}, - {file = "frozenlist-1.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8c905a5186d77111f02144fab5b849ab524f1e876a1e75205cd1386a9be4b00a"}, - {file = "frozenlist-1.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:b5009062d78a8c6890d50b4e53b0ddda31841b3935c1937e2ed8c1bda1c7fb9d"}, - {file = "frozenlist-1.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2fdc3cd845e5a1f71a0c3518528bfdbfe2efaf9886d6f49eacc5ee4fd9a10953"}, - {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92e650bd09b5dda929523b9f8e7f99b24deac61240ecc1a32aeba487afcd970f"}, - {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:40dff8962b8eba91fd3848d857203f0bd704b5f1fa2b3fc9af64901a190bba08"}, - {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:768efd082074bb203c934e83a61654ed4931ef02412c2fbdecea0cff7ecd0274"}, - {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:006d3595e7d4108a12025ddf415ae0f6c9e736e726a5db0183326fd191b14c5e"}, - {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:871d42623ae15eb0b0e9df65baeee6976b2e161d0ba93155411d58ff27483ad8"}, - {file = "frozenlist-1.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aff388be97ef2677ae185e72dc500d19ecaf31b698986800d3fc4f399a5e30a5"}, - {file = "frozenlist-1.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9f892d6a94ec5c7b785e548e42722e6f3a52f5f32a8461e82ac3e67a3bd073f1"}, - {file = "frozenlist-1.3.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:e982878792c971cbd60ee510c4ee5bf089a8246226dea1f2138aa0bb67aff148"}, - {file = "frozenlist-1.3.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:c6c321dd013e8fc20735b92cb4892c115f5cdb82c817b1e5b07f6b95d952b2f0"}, - {file = "frozenlist-1.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:30530930410855c451bea83f7b272fb1c495ed9d5cc72895ac29e91279401db3"}, - {file = "frozenlist-1.3.0-cp38-cp38-win32.whl", hash = 
"sha256:40ec383bc194accba825fbb7d0ef3dda5736ceab2375462f1d8672d9f6b68d07"}, - {file = "frozenlist-1.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:f20baa05eaa2bcd5404c445ec51aed1c268d62600362dc6cfe04fae34a424bd9"}, - {file = "frozenlist-1.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0437fe763fb5d4adad1756050cbf855bbb2bf0d9385c7bb13d7a10b0dd550486"}, - {file = "frozenlist-1.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b684c68077b84522b5c7eafc1dc735bfa5b341fb011d5552ebe0968e22ed641c"}, - {file = "frozenlist-1.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93641a51f89473837333b2f8100f3f89795295b858cd4c7d4a1f18e299dc0a4f"}, - {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6d32ff213aef0fd0bcf803bffe15cfa2d4fde237d1d4838e62aec242a8362fa"}, - {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31977f84828b5bb856ca1eb07bf7e3a34f33a5cddce981d880240ba06639b94d"}, - {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3c62964192a1c0c30b49f403495911298810bada64e4f03249ca35a33ca0417a"}, - {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4eda49bea3602812518765810af732229b4291d2695ed24a0a20e098c45a707b"}, - {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acb267b09a509c1df5a4ca04140da96016f40d2ed183cdc356d237286c971b51"}, - {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e1e26ac0a253a2907d654a37e390904426d5ae5483150ce3adedb35c8c06614a"}, - {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f96293d6f982c58ebebb428c50163d010c2f05de0cde99fd681bfdc18d4b2dc2"}, - {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:e84cb61b0ac40a0c3e0e8b79c575161c5300d1d89e13c0e02f76193982f066ed"}, - {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:ff9310f05b9d9c5c4dd472983dc956901ee6cb2c3ec1ab116ecdde25f3ce4951"}, - {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d26b650b71fdc88065b7a21f8ace70175bcf3b5bdba5ea22df4bfd893e795a3b"}, - {file = "frozenlist-1.3.0-cp39-cp39-win32.whl", hash = "sha256:01a73627448b1f2145bddb6e6c2259988bb8aee0fb361776ff8604b99616cd08"}, - {file = "frozenlist-1.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:772965f773757a6026dea111a15e6e2678fbd6216180f82a48a40b27de1ee2ab"}, - {file = "frozenlist-1.3.0.tar.gz", hash = "sha256:ce6f2ba0edb7b0c1d8976565298ad2deba6f8064d2bebb6ffce2ca896eb35b0b"}, -] -graphviz = [ - {file = "graphviz-0.20-py3-none-any.whl", hash = "sha256:62c5f48bcc534a45b4588c548ff75e419c1f1f3a33d31a91796ae80a7f581e4a"}, - {file = "graphviz-0.20.zip", hash = "sha256:76bdfb73f42e72564ffe9c7299482f9d72f8e6cb8d54bce7b48ab323755e9ba5"}, -] -hathorlib = [ - {file = "hathorlib-0.2.0-py3-none-any.whl", hash = "sha256:39b500c61aba556a404707f442ca29c17d816b9590e9b3b91d350c26dfd6d4bd"}, - {file = "hathorlib-0.2.0.tar.gz", hash = "sha256:7c62a93de6599a9b8f36a09da69f5892eefc83017e70ca0bcf0084a28e02309e"}, -] -hyperlink = [ - {file = "hyperlink-21.0.0-py2.py3-none-any.whl", hash = "sha256:e6b14c37ecb73e89c77d78cdb4c2cc8f3fb59a885c5b3f819ff4ed80f25af1b4"}, - {file = "hyperlink-21.0.0.tar.gz", hash = "sha256:427af957daa58bc909471c6c40f74c5450fa123dd093fc53efd2e91d2705a56b"}, -] -idna = [ - {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, - {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, -] -incremental = [ - {file = "incremental-21.3.0-py2.py3-none-any.whl", hash = "sha256:92014aebc6a20b78a8084cdd5645eeaa7f74b8933f70fa3ada2cfbd1e3b54321"}, - {file = "incremental-21.3.0.tar.gz", 
hash = "sha256:02f5de5aff48f6b9f665d99d48bfc7ec03b6e3943210de7cfc88856d755d6f57"}, -] -iniconfig = [ - {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, - {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, -] -intervaltree = [ - {file = "intervaltree-3.1.0.tar.gz", hash = "sha256:902b1b88936918f9b2a19e0e5eb7ccb430ae45cde4f39ea4b36932920d33952d"}, -] -ipython = [ - {file = "ipython-8.4.0-py3-none-any.whl", hash = "sha256:7ca74052a38fa25fe9bedf52da0be7d3fdd2fb027c3b778ea78dfe8c212937d1"}, - {file = "ipython-8.4.0.tar.gz", hash = "sha256:f2db3a10254241d9b447232cec8b424847f338d9d36f9a577a6192c332a46abd"}, -] -isort = [ - {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, - {file = "isort-5.10.1.tar.gz", hash = "sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, -] -jedi = [ - {file = "jedi-0.18.1-py2.py3-none-any.whl", hash = "sha256:637c9635fcf47945ceb91cd7f320234a7be540ded6f3e99a50cb6febdfd1ba8d"}, - {file = "jedi-0.18.1.tar.gz", hash = "sha256:74137626a64a99c8eb6ae5832d99b3bdd7d29a3850fe2aa80a4126b2a7d949ab"}, -] -matplotlib-inline = [ - {file = "matplotlib-inline-0.1.3.tar.gz", hash = "sha256:a04bfba22e0d1395479f866853ec1ee28eea1485c1d69a6faf00dc3e24ff34ee"}, - {file = "matplotlib_inline-0.1.3-py3-none-any.whl", hash = "sha256:aed605ba3b72462d64d475a21a9296f400a19c4f74a31b59103d2a99ffd5aa5c"}, -] -mccabe = [ - {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, - {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, -] -mnemonic = [ - {file = "mnemonic-0.20-py3-none-any.whl", hash = "sha256:acd2168872d0379e7a10873bb3e12bf6c91b35de758135c4fbd1015ef18fafc5"}, - {file = "mnemonic-0.20.tar.gz", 
hash = "sha256:7c6fb5639d779388027a77944680aee4870f0fcd09b1e42a5525ee2ce4c625f6"}, -] -multidict = [ - {file = "multidict-6.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b9e95a740109c6047602f4db4da9949e6c5945cefbad34a1299775ddc9a62e2"}, - {file = "multidict-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac0e27844758d7177989ce406acc6a83c16ed4524ebc363c1f748cba184d89d3"}, - {file = "multidict-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:041b81a5f6b38244b34dc18c7b6aba91f9cdaf854d9a39e5ff0b58e2b5773b9c"}, - {file = "multidict-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fdda29a3c7e76a064f2477c9aab1ba96fd94e02e386f1e665bca1807fc5386f"}, - {file = "multidict-6.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3368bf2398b0e0fcbf46d85795adc4c259299fec50c1416d0f77c0a843a3eed9"}, - {file = "multidict-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4f052ee022928d34fe1f4d2bc743f32609fb79ed9c49a1710a5ad6b2198db20"}, - {file = "multidict-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:225383a6603c086e6cef0f2f05564acb4f4d5f019a4e3e983f572b8530f70c88"}, - {file = "multidict-6.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50bd442726e288e884f7be9071016c15a8742eb689a593a0cac49ea093eef0a7"}, - {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:47e6a7e923e9cada7c139531feac59448f1f47727a79076c0b1ee80274cd8eee"}, - {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0556a1d4ea2d949efe5fd76a09b4a82e3a4a30700553a6725535098d8d9fb672"}, - {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:626fe10ac87851f4cffecee161fc6f8f9853f0f6f1035b59337a51d29ff3b4f9"}, - {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = 
"sha256:8064b7c6f0af936a741ea1efd18690bacfbae4078c0c385d7c3f611d11f0cf87"}, - {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2d36e929d7f6a16d4eb11b250719c39560dd70545356365b494249e2186bc389"}, - {file = "multidict-6.0.2-cp310-cp310-win32.whl", hash = "sha256:fcb91630817aa8b9bc4a74023e4198480587269c272c58b3279875ed7235c293"}, - {file = "multidict-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:8cbf0132f3de7cc6c6ce00147cc78e6439ea736cee6bca4f068bcf892b0fd658"}, - {file = "multidict-6.0.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:05f6949d6169878a03e607a21e3b862eaf8e356590e8bdae4227eedadacf6e51"}, - {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2c2e459f7050aeb7c1b1276763364884595d47000c1cddb51764c0d8976e608"}, - {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d0509e469d48940147e1235d994cd849a8f8195e0bca65f8f5439c56e17872a3"}, - {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:514fe2b8d750d6cdb4712346a2c5084a80220821a3e91f3f71eec11cf8d28fd4"}, - {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19adcfc2a7197cdc3987044e3f415168fc5dc1f720c932eb1ef4f71a2067e08b"}, - {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b9d153e7f1f9ba0b23ad1568b3b9e17301e23b042c23870f9ee0522dc5cc79e8"}, - {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:aef9cc3d9c7d63d924adac329c33835e0243b5052a6dfcbf7732a921c6e918ba"}, - {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4571f1beddff25f3e925eea34268422622963cd8dc395bb8778eb28418248e43"}, - {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:d48b8ee1d4068561ce8033d2c344cf5232cb29ee1a0206a7b828c79cbc5982b8"}, - {file = 
"multidict-6.0.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:45183c96ddf61bf96d2684d9fbaf6f3564d86b34cb125761f9a0ef9e36c1d55b"}, - {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:75bdf08716edde767b09e76829db8c1e5ca9d8bb0a8d4bd94ae1eafe3dac5e15"}, - {file = "multidict-6.0.2-cp37-cp37m-win32.whl", hash = "sha256:a45e1135cb07086833ce969555df39149680e5471c04dfd6a915abd2fc3f6dbc"}, - {file = "multidict-6.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6f3cdef8a247d1eafa649085812f8a310e728bdf3900ff6c434eafb2d443b23a"}, - {file = "multidict-6.0.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0327292e745a880459ef71be14e709aaea2f783f3537588fb4ed09b6c01bca60"}, - {file = "multidict-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e875b6086e325bab7e680e4316d667fc0e5e174bb5611eb16b3ea121c8951b86"}, - {file = "multidict-6.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:feea820722e69451743a3d56ad74948b68bf456984d63c1a92e8347b7b88452d"}, - {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cc57c68cb9139c7cd6fc39f211b02198e69fb90ce4bc4a094cf5fe0d20fd8b0"}, - {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:497988d6b6ec6ed6f87030ec03280b696ca47dbf0648045e4e1d28b80346560d"}, - {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:89171b2c769e03a953d5969b2f272efa931426355b6c0cb508022976a17fd376"}, - {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:684133b1e1fe91eda8fa7447f137c9490a064c6b7f392aa857bba83a28cfb693"}, - {file = "multidict-6.0.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd9fc9c4849a07f3635ccffa895d57abce554b467d611a5009ba4f39b78a8849"}, - {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:e07c8e79d6e6fd37b42f3250dba122053fddb319e84b55dd3a8d6446e1a7ee49"}, - {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4070613ea2227da2bfb2c35a6041e4371b0af6b0be57f424fe2318b42a748516"}, - {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:47fbeedbf94bed6547d3aa632075d804867a352d86688c04e606971595460227"}, - {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:5774d9218d77befa7b70d836004a768fb9aa4fdb53c97498f4d8d3f67bb9cfa9"}, - {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2957489cba47c2539a8eb7ab32ff49101439ccf78eab724c828c1a54ff3ff98d"}, - {file = "multidict-6.0.2-cp38-cp38-win32.whl", hash = "sha256:e5b20e9599ba74391ca0cfbd7b328fcc20976823ba19bc573983a25b32e92b57"}, - {file = "multidict-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:8004dca28e15b86d1b1372515f32eb6f814bdf6f00952699bdeb541691091f96"}, - {file = "multidict-6.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2e4a0785b84fb59e43c18a015ffc575ba93f7d1dbd272b4cdad9f5134b8a006c"}, - {file = "multidict-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6701bf8a5d03a43375909ac91b6980aea74b0f5402fbe9428fc3f6edf5d9677e"}, - {file = "multidict-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a007b1638e148c3cfb6bf0bdc4f82776cef0ac487191d093cdc316905e504071"}, - {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07a017cfa00c9890011628eab2503bee5872f27144936a52eaab449be5eaf032"}, - {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c207fff63adcdf5a485969131dc70e4b194327666b7e8a87a97fbc4fd80a53b2"}, - {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:373ba9d1d061c76462d74e7de1c0c8e267e9791ee8cfefcf6b0b2495762c370c"}, - {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:bfba7c6d5d7c9099ba21f84662b037a0ffd4a5e6b26ac07d19e423e6fdf965a9"}, - {file = "multidict-6.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19d9bad105dfb34eb539c97b132057a4e709919ec4dd883ece5838bcbf262b80"}, - {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:de989b195c3d636ba000ee4281cd03bb1234635b124bf4cd89eeee9ca8fcb09d"}, - {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7c40b7bbece294ae3a87c1bc2abff0ff9beef41d14188cda94ada7bcea99b0fb"}, - {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:d16cce709ebfadc91278a1c005e3c17dd5f71f5098bfae1035149785ea6e9c68"}, - {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:a2c34a93e1d2aa35fbf1485e5010337c72c6791407d03aa5f4eed920343dd360"}, - {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:feba80698173761cddd814fa22e88b0661e98cb810f9f986c54aa34d281e4937"}, - {file = "multidict-6.0.2-cp39-cp39-win32.whl", hash = "sha256:23b616fdc3c74c9fe01d76ce0d1ce872d2d396d8fa8e4899398ad64fb5aa214a"}, - {file = "multidict-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:4bae31803d708f6f15fd98be6a6ac0b6958fcf68fda3c77a048a4f9073704aae"}, - {file = "multidict-6.0.2.tar.gz", hash = "sha256:5ff3bd75f38e4c43f1f470f2df7a4d430b821c4ce22be384e1459cb57d6bb013"}, -] -mypy = [ - {file = "mypy-0.950-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cf9c261958a769a3bd38c3e133801ebcd284ffb734ea12d01457cb09eacf7d7b"}, - {file = "mypy-0.950-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5b5bd0ffb11b4aba2bb6d31b8643902c48f990cc92fda4e21afac658044f0c0"}, - {file = "mypy-0.950-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5e7647df0f8fc947388e6251d728189cfadb3b1e558407f93254e35abc026e22"}, - {file = "mypy-0.950-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:eaff8156016487c1af5ffa5304c3e3fd183edcb412f3e9c72db349faf3f6e0eb"}, - {file = "mypy-0.950-cp310-cp310-win_amd64.whl", hash = "sha256:563514c7dc504698fb66bb1cf897657a173a496406f1866afae73ab5b3cdb334"}, - {file = "mypy-0.950-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:dd4d670eee9610bf61c25c940e9ade2d0ed05eb44227275cce88701fee014b1f"}, - {file = "mypy-0.950-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ca75ecf2783395ca3016a5e455cb322ba26b6d33b4b413fcdedfc632e67941dc"}, - {file = "mypy-0.950-cp36-cp36m-win_amd64.whl", hash = "sha256:6003de687c13196e8a1243a5e4bcce617d79b88f83ee6625437e335d89dfebe2"}, - {file = "mypy-0.950-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4c653e4846f287051599ed8f4b3c044b80e540e88feec76b11044ddc5612ffed"}, - {file = "mypy-0.950-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e19736af56947addedce4674c0971e5dceef1b5ec7d667fe86bcd2b07f8f9075"}, - {file = "mypy-0.950-cp37-cp37m-win_amd64.whl", hash = "sha256:ef7beb2a3582eb7a9f37beaf38a28acfd801988cde688760aea9e6cc4832b10b"}, - {file = "mypy-0.950-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0112752a6ff07230f9ec2f71b0d3d4e088a910fdce454fdb6553e83ed0eced7d"}, - {file = "mypy-0.950-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ee0a36edd332ed2c5208565ae6e3a7afc0eabb53f5327e281f2ef03a6bc7687a"}, - {file = "mypy-0.950-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:77423570c04aca807508a492037abbd72b12a1fb25a385847d191cd50b2c9605"}, - {file = "mypy-0.950-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5ce6a09042b6da16d773d2110e44f169683d8cc8687e79ec6d1181a72cb028d2"}, - {file = "mypy-0.950-cp38-cp38-win_amd64.whl", hash = "sha256:5b231afd6a6e951381b9ef09a1223b1feabe13625388db48a8690f8daa9b71ff"}, - {file = "mypy-0.950-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:0384d9f3af49837baa92f559d3fa673e6d2652a16550a9ee07fc08c736f5e6f8"}, - {file = "mypy-0.950-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1fdeb0a0f64f2a874a4c1f5271f06e40e1e9779bf55f9567f149466fc7a55038"}, - {file = "mypy-0.950-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:61504b9a5ae166ba5ecfed9e93357fd51aa693d3d434b582a925338a2ff57fd2"}, - {file = "mypy-0.950-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a952b8bc0ae278fc6316e6384f67bb9a396eb30aced6ad034d3a76120ebcc519"}, - {file = "mypy-0.950-cp39-cp39-win_amd64.whl", hash = "sha256:eaea21d150fb26d7b4856766e7addcf929119dd19fc832b22e71d942835201ef"}, - {file = "mypy-0.950-py3-none-any.whl", hash = "sha256:a4d9898f46446bfb6405383b57b96737dcfd0a7f25b748e78ef3e8c576bba3cb"}, - {file = "mypy-0.950.tar.gz", hash = "sha256:1b333cfbca1762ff15808a0ef4f71b5d3eed8528b23ea1c3fb50543c867d68de"}, -] -mypy-extensions = [ - {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, - {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, -] -mypy-zope = [ - {file = "mypy-zope-0.3.7.tar.gz", hash = "sha256:9da171e78e8ef7ac8922c86af1a62f1b7f3244f121020bd94a2246bc3f33c605"}, - {file = "mypy_zope-0.3.7-py3-none-any.whl", hash = "sha256:9c7637d066e4d1bafa0651abc091c752009769098043b236446e6725be2bc9c2"}, -] -packaging = [ - {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, - {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, -] -parso = [ - {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, - {file = "parso-0.8.3.tar.gz", hash = 
"sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, -] -pexpect = [ - {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, - {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"}, -] -pickleshare = [ - {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, - {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, -] -pluggy = [ - {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, - {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, -] -prometheus-client = [ - {file = "prometheus_client-0.14.1-py3-none-any.whl", hash = "sha256:522fded625282822a89e2773452f42df14b5a8e84a86433e3f8a189c1d54dc01"}, - {file = "prometheus_client-0.14.1.tar.gz", hash = "sha256:5459c427624961076277fdc6dc50540e2bacb98eebde99886e59ec55ed92093a"}, -] -prompt-toolkit = [ - {file = "prompt_toolkit-3.0.29-py3-none-any.whl", hash = "sha256:62291dad495e665fca0bda814e342c69952086afb0f4094d0893d357e5c78752"}, - {file = "prompt_toolkit-3.0.29.tar.gz", hash = "sha256:bd640f60e8cecd74f0dc249713d433ace2ddc62b65ee07f96d358e0b152b6ea7"}, -] -ptyprocess = [ - {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, - {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, -] -pure-eval = [ - {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, - {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, -] -py = 
[ - {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, - {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, -] -pyasn1 = [ - {file = "pyasn1-0.4.8-py2.4.egg", hash = "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3"}, - {file = "pyasn1-0.4.8-py2.5.egg", hash = "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf"}, - {file = "pyasn1-0.4.8-py2.6.egg", hash = "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00"}, - {file = "pyasn1-0.4.8-py2.7.egg", hash = "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8"}, - {file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"}, - {file = "pyasn1-0.4.8-py3.1.egg", hash = "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86"}, - {file = "pyasn1-0.4.8-py3.2.egg", hash = "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7"}, - {file = "pyasn1-0.4.8-py3.3.egg", hash = "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576"}, - {file = "pyasn1-0.4.8-py3.4.egg", hash = "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12"}, - {file = "pyasn1-0.4.8-py3.5.egg", hash = "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2"}, - {file = "pyasn1-0.4.8-py3.6.egg", hash = "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359"}, - {file = "pyasn1-0.4.8-py3.7.egg", hash = "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776"}, - {file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"}, -] -pyasn1-modules = [ - {file = "pyasn1-modules-0.2.8.tar.gz", hash = "sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e"}, - {file = 
"pyasn1_modules-0.2.8-py2.4.egg", hash = "sha256:0fe1b68d1e486a1ed5473f1302bd991c1611d319bba158e98b106ff86e1d7199"}, - {file = "pyasn1_modules-0.2.8-py2.5.egg", hash = "sha256:fe0644d9ab041506b62782e92b06b8c68cca799e1a9636ec398675459e031405"}, - {file = "pyasn1_modules-0.2.8-py2.6.egg", hash = "sha256:a99324196732f53093a84c4369c996713eb8c89d360a496b599fb1a9c47fc3eb"}, - {file = "pyasn1_modules-0.2.8-py2.7.egg", hash = "sha256:0845a5582f6a02bb3e1bde9ecfc4bfcae6ec3210dd270522fee602365430c3f8"}, - {file = "pyasn1_modules-0.2.8-py2.py3-none-any.whl", hash = "sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74"}, - {file = "pyasn1_modules-0.2.8-py3.1.egg", hash = "sha256:f39edd8c4ecaa4556e989147ebf219227e2cd2e8a43c7e7fcb1f1c18c5fd6a3d"}, - {file = "pyasn1_modules-0.2.8-py3.2.egg", hash = "sha256:b80486a6c77252ea3a3e9b1e360bc9cf28eaac41263d173c032581ad2f20fe45"}, - {file = "pyasn1_modules-0.2.8-py3.3.egg", hash = "sha256:65cebbaffc913f4fe9e4808735c95ea22d7a7775646ab690518c056784bc21b4"}, - {file = "pyasn1_modules-0.2.8-py3.4.egg", hash = "sha256:15b7c67fabc7fc240d87fb9aabf999cf82311a6d6fb2c70d00d3d0604878c811"}, - {file = "pyasn1_modules-0.2.8-py3.5.egg", hash = "sha256:426edb7a5e8879f1ec54a1864f16b882c2837bfd06eee62f2c982315ee2473ed"}, - {file = "pyasn1_modules-0.2.8-py3.6.egg", hash = "sha256:cbac4bc38d117f2a49aeedec4407d23e8866ea4ac27ff2cf7fb3e5b570df19e0"}, - {file = "pyasn1_modules-0.2.8-py3.7.egg", hash = "sha256:c29a5e5cc7a3f05926aff34e097e84f8589cd790ce0ed41b67aed6857b26aafd"}, -] -pycodestyle = [ - {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"}, - {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"}, -] -pycoin = [ - {file = "pycoin-0.92.20220529.tar.gz", hash = "sha256:3d0396475b5e2d9da7a5057eab72be0e088505e8e44680788106236872c542a2"}, -] -pycparser = [ - {file = 
"pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, - {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, -] -pyflakes = [ - {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"}, - {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, -] -pygments = [ - {file = "Pygments-2.12.0-py3-none-any.whl", hash = "sha256:dc9c10fb40944260f6ed4c688ece0cd2048414940f1cea51b8b226318411c519"}, - {file = "Pygments-2.12.0.tar.gz", hash = "sha256:5eb116118f9612ff1ee89ac96437bb6b49e8f04d8a13b514ba26f620208e26eb"}, -] -pyopenssl = [ - {file = "pyOpenSSL-22.0.0-py2.py3-none-any.whl", hash = "sha256:ea252b38c87425b64116f808355e8da644ef9b07e429398bfece610f893ee2e0"}, - {file = "pyOpenSSL-22.0.0.tar.gz", hash = "sha256:660b1b1425aac4a1bea1d94168a85d99f0b3144c869dd4390d27629d0087f1bf"}, -] -pyparsing = [ - {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, - {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, -] -pytest = [ - {file = "pytest-7.1.2-py3-none-any.whl", hash = "sha256:13d0e3ccfc2b6e26be000cb6568c832ba67ba32e719443bfe725814d3c42433c"}, - {file = "pytest-7.1.2.tar.gz", hash = "sha256:a06a0425453864a270bc45e71f783330a7428defb4230fb5e6a731fde06ecd45"}, -] -pytest-cov = [ - {file = "pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470"}, - {file = "pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6"}, -] -pywin32 = [ - {file = "pywin32-304-cp310-cp310-win32.whl", hash = "sha256:3c7bacf5e24298c86314f03fa20e16558a4e4138fc34615d7de4070c23e65af3"}, - {file = 
"pywin32-304-cp310-cp310-win_amd64.whl", hash = "sha256:4f32145913a2447736dad62495199a8e280a77a0ca662daa2332acf849f0be48"}, - {file = "pywin32-304-cp310-cp310-win_arm64.whl", hash = "sha256:d3ee45adff48e0551d1aa60d2ec066fec006083b791f5c3527c40cd8aefac71f"}, - {file = "pywin32-304-cp311-cp311-win32.whl", hash = "sha256:30c53d6ce44c12a316a06c153ea74152d3b1342610f1b99d40ba2795e5af0269"}, - {file = "pywin32-304-cp311-cp311-win_amd64.whl", hash = "sha256:7ffa0c0fa4ae4077e8b8aa73800540ef8c24530057768c3ac57c609f99a14fd4"}, - {file = "pywin32-304-cp311-cp311-win_arm64.whl", hash = "sha256:cbbe34dad39bdbaa2889a424d28752f1b4971939b14b1bb48cbf0182a3bcfc43"}, - {file = "pywin32-304-cp36-cp36m-win32.whl", hash = "sha256:be253e7b14bc601718f014d2832e4c18a5b023cbe72db826da63df76b77507a1"}, - {file = "pywin32-304-cp36-cp36m-win_amd64.whl", hash = "sha256:de9827c23321dcf43d2f288f09f3b6d772fee11e809015bdae9e69fe13213988"}, - {file = "pywin32-304-cp37-cp37m-win32.whl", hash = "sha256:f64c0377cf01b61bd5e76c25e1480ca8ab3b73f0c4add50538d332afdf8f69c5"}, - {file = "pywin32-304-cp37-cp37m-win_amd64.whl", hash = "sha256:bb2ea2aa81e96eee6a6b79d87e1d1648d3f8b87f9a64499e0b92b30d141e76df"}, - {file = "pywin32-304-cp38-cp38-win32.whl", hash = "sha256:94037b5259701988954931333aafd39cf897e990852115656b014ce72e052e96"}, - {file = "pywin32-304-cp38-cp38-win_amd64.whl", hash = "sha256:ead865a2e179b30fb717831f73cf4373401fc62fbc3455a0889a7ddac848f83e"}, - {file = "pywin32-304-cp39-cp39-win32.whl", hash = "sha256:25746d841201fd9f96b648a248f731c1dec851c9a08b8e33da8b56148e4c65cc"}, - {file = "pywin32-304-cp39-cp39-win_amd64.whl", hash = "sha256:d24a3382f013b21aa24a5cfbfad5a2cd9926610c0affde3e8ab5b3d7dbcf4ac9"}, -] -requests = [ - {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, - {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"}, -] -rocksdb = [] -sentry-sdk = 
[ - {file = "sentry-sdk-1.5.12.tar.gz", hash = "sha256:259535ba66933eacf85ab46524188c84dcb4c39f40348455ce15e2c0aca68863"}, - {file = "sentry_sdk-1.5.12-py2.py3-none-any.whl", hash = "sha256:778b53f0a6c83b1ee43d3b7886318ba86d975e686cb2c7906ccc35b334360be1"}, -] -service-identity = [ - {file = "service-identity-21.1.0.tar.gz", hash = "sha256:6e6c6086ca271dc11b033d17c3a8bea9f24ebff920c587da090afc9519419d34"}, - {file = "service_identity-21.1.0-py2.py3-none-any.whl", hash = "sha256:f0b0caac3d40627c3c04d7a51b6e06721857a0e10a8775f2d1d7e72901b3a7db"}, -] -setproctitle = [ - {file = "setproctitle-1.2.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0a668acec8b61a971de54bc4c733869ea7b0eb1348eae5a32b9477f788908e5c"}, - {file = "setproctitle-1.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:52265182fe5ac237d179d8e949248d307882a2e6ec7f189c8dac1c9d1b3631fa"}, - {file = "setproctitle-1.2.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71d00ef63a1f78e13c236895badac77b6c8503377467b9c1a4f81fe729d16e03"}, - {file = "setproctitle-1.2.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb82a49aaf440232c762539ab3737b5174d31aba0141fd4bf4d8739c28d18624"}, - {file = "setproctitle-1.2.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:791bed39e4ecbdd008b64999a60c9cc560d17b3836ca0c27cd4708e8e1bcf495"}, - {file = "setproctitle-1.2.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8e4da68d4d4ba46d4c5db6ae5eb61b11de9c520f25ae8334570f4d0018a8611"}, - {file = "setproctitle-1.2.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:47f97f591ea2335b7d35f5e9ad7d806385338182dc6de5732d091e9c70ed1cc0"}, - {file = "setproctitle-1.2.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:501c084cf3df7d848e91c97d4f8c44d799ba545858a79c6960326ce6f285b4e4"}, - {file = 
"setproctitle-1.2.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:a39b30d7400c0d50941fe19e1fe0b7d35676186fec4d9c010129ac91b883fd26"}, - {file = "setproctitle-1.2.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b213376fc779c0e1a4b60008f3fd03f74e9baa9665db37fa6646e98d31baa6d8"}, - {file = "setproctitle-1.2.3-cp310-cp310-win32.whl", hash = "sha256:e24fa9251cc22ddb88ef183070063fdca826c9636381f1c4fb9d2a1dccb7c2a4"}, - {file = "setproctitle-1.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:3b1883ccdbee624386dc046cfbcd80c4e75e24c478f35627984a79892e088b88"}, - {file = "setproctitle-1.2.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9cf1098205c23fbcaaaef798afaff714fa9ffadf24166f5e85e6d16b9ef82a1"}, - {file = "setproctitle-1.2.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a546cd2dfaecb227d24122257b98b2e062762871888835c7b608f1c41c3a77ad"}, - {file = "setproctitle-1.2.3-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e40c35564081983eab6a07f9eb5693867bc447b0edf9c61b69446223d6593814"}, - {file = "setproctitle-1.2.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d083cae02e344e760bd21c28d591ac5f7ddbd6e1a0ecba62092ae724abd5c28"}, - {file = "setproctitle-1.2.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2fa9f4b382a6cf88f2f345044d0916a92f37cac21355585bd14bc7ee91af187"}, - {file = "setproctitle-1.2.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:38855b06a124361dc73c198853dee3f2b775531c4f4b7472f0e3d441192b3d8a"}, - {file = "setproctitle-1.2.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:a81067bdc015fee1cc148c79b346f24fdad1224a8898b4239c7cbdee1add8a60"}, - {file = "setproctitle-1.2.3-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:409a39f92e123be061626fdfd3e76625b04db103479bb4ba1c85b587db0b9498"}, - {file = 
"setproctitle-1.2.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:a993610383028f093112dce7f77b262e88fce9d70127535fcdc78953179857e8"}, - {file = "setproctitle-1.2.3-cp36-cp36m-win32.whl", hash = "sha256:4eed53c12146de5df959d84384ffc2774651cab406ee4854e12728cf0eee5297"}, - {file = "setproctitle-1.2.3-cp36-cp36m-win_amd64.whl", hash = "sha256:335750c9eb5b18326a138a09266862a52b4f474277c3e410b419bea9a1df8bee"}, - {file = "setproctitle-1.2.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7a72bbe53191fbe574c94c0f8b9451dce535b398b7c47ce2e26e21d55eaa1d7e"}, - {file = "setproctitle-1.2.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5464e6812d050c986e6e9b97d54ab88c23dbe9d81151a2fa10b48bb5133a1e2c"}, - {file = "setproctitle-1.2.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ec7c3a27460ae7811e868e5494e3d8aee5012912744c48fa2d80b5e614b1b972"}, - {file = "setproctitle-1.2.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01cef383afc7ea7a3b1696818c8712029bf2f1d64f5d4777dbaf0166becf2c00"}, - {file = "setproctitle-1.2.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54c7315e53b49ef2227d47a75c3d28c4c51ea9ee46a066460732c0d0f8e605a7"}, - {file = "setproctitle-1.2.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0b444ed4051161a3b0a85dec2bb9b50922f37c75f5fb86f7784b235cf6754336"}, - {file = "setproctitle-1.2.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:be0b46beeb1c92450079a7f30a025d69b63fd6a5de040ebc478fd6e6bf3b63fc"}, - {file = "setproctitle-1.2.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:60f7a2f5da36a3075dda7edbee2173be5b765b0460b8d401ee01a11f68dee1d2"}, - {file = "setproctitle-1.2.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:138bfa853e607f06d95b0f253e9152b32a00af3d0dbec96abf0871236a483932"}, - {file = "setproctitle-1.2.3-cp37-cp37m-win32.whl", hash = 
"sha256:e80fc59739a738b5c67afbbb9d1c238aa47b6d290c2ada872b15c819350ec5f8"}, - {file = "setproctitle-1.2.3-cp37-cp37m-win_amd64.whl", hash = "sha256:a912df3f065572cef211e9ed9f157a0dd2bd73d150281f18f00728afa1b1e5d2"}, - {file = "setproctitle-1.2.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d45dbe4171f8c27a515ecb4562f4cd9ef67d98474bea18e0c14dfbdc2b225050"}, - {file = "setproctitle-1.2.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9d905ac84dde5227de6516ec08639759f99684148bb88ba05f4cbdaebff5d69"}, - {file = "setproctitle-1.2.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f272b84d79bbe15af26ecf6f7c129bbe642f628866c9253659cdb519216f138f"}, - {file = "setproctitle-1.2.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fc586f002fd5dd8695718e22a83771fd9f744f081a2b8e614bf6b5f44135964a"}, - {file = "setproctitle-1.2.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4051c3a3b07f8a4cca205cd45366a22f322da2f26491c0d6b313a10f8c77b734"}, - {file = "setproctitle-1.2.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:25538341e56f9e75e9759229ff674282dccb5b1ce79a974f968d36208d465674"}, - {file = "setproctitle-1.2.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:fdb2231db176e0848b757fc5d9bed08bc8a498b5b9abb8b640f39e9720f309fc"}, - {file = "setproctitle-1.2.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0670f2130a7ca0e167d3d5a7c8e3c707340b8693d6af7416ff55c18ab2a0a43f"}, - {file = "setproctitle-1.2.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:9a92978030616f5e20617b7b832efee398df82072b7239c53db41c8026f5fe55"}, - {file = "setproctitle-1.2.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:28e0df80d5069586a08a3cb463fb23503a37cbb805826ef93164bc4bfb5f35b9"}, - {file = "setproctitle-1.2.3-cp38-cp38-win32.whl", hash = 
"sha256:35b869e416a105c59133a48b569c6e808159485d916f55e80c7394a42667a386"}, - {file = "setproctitle-1.2.3-cp38-cp38-win_amd64.whl", hash = "sha256:f47f6704880869d8e8f52efac2f2f60f5ed4cb9662b98fc1c7e916eefe76e61d"}, - {file = "setproctitle-1.2.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ccb0b5334dbf248f7504d88b5e9e9a09a0da119eeafacd6f7247f7c055443522"}, - {file = "setproctitle-1.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:14641a4ec2f2110cf4afc666eaecc82ba67814e927e02647fa1f4cf74476e752"}, - {file = "setproctitle-1.2.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4a3cb19346a0cd680617742f5e39fdd14596f6fd91d6c9038272663e37441b4"}, - {file = "setproctitle-1.2.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2ac0ebd9c63c3d19f768966be2f771bf088bc7373c63ed6fcbb3444a30d0f62"}, - {file = "setproctitle-1.2.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32a84cc309b9e595f06a55bec2fa335a23c307a55d2989864b60ecd71ea87897"}, - {file = "setproctitle-1.2.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f55493c987935fa540ef9ffb7ee7db03b4a18a9d5cc103681e2e6a6dfbd7054"}, - {file = "setproctitle-1.2.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f2a137984d3436f13e4bf7c8ca6f6f292df119c009c5e39556cabba4f4bfbf92"}, - {file = "setproctitle-1.2.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f06ff922254023eaabef6af6631f89e5f2f420cf0112865d57d7703f933d4e9f"}, - {file = "setproctitle-1.2.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:eb06c1086cf8c8cf12ce45a02450befcb408dfd646d0ccb47d388fd6e73c333a"}, - {file = "setproctitle-1.2.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2c8c245e08f6a296fdaa1b36894ec40e20464a4fc6458e6178c8d55a2f83457a"}, - {file = "setproctitle-1.2.3-cp39-cp39-win32.whl", hash = 
"sha256:21d6e064b8fee4e58eb00cdd8771c638de1bc30bb6c02d0208af9ca0a1c00898"}, - {file = "setproctitle-1.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:efb3001fd9e71d3ae939d826bf436f0446fd30a6ac01e0ce08cd7eb55ee5ac57"}, - {file = "setproctitle-1.2.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3dbe87e76197f9a303451512088c18c96f09a6fc4f871a92e5bd695f46f94a26"}, - {file = "setproctitle-1.2.3-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b207de9e4f4aa5265b36dd826a1f6ef6566b064a042033bd7447efb7e9a7664"}, - {file = "setproctitle-1.2.3-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48ac48a94040ef21be37366cbc8270fcba2ca103d6c64da6099d5a7b034f72d0"}, - {file = "setproctitle-1.2.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:9fb5d2e66f94eebc3d06cda9e71a3fffef24c5273971180a4b5628a37fae05a5"}, - {file = "setproctitle-1.2.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:423f8a6d8116acf975ebf93d6b5c4a752f7d2039fa9aafe175a62de86e17016e"}, - {file = "setproctitle-1.2.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c0be45535e934deab3aa72ed1a8487174af4ea12cec124478c68a312e1c8b13"}, - {file = "setproctitle-1.2.3-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65a9384cafdfed98f91416e93705ad08f049c298afcb9c515882beba23153bd0"}, - {file = "setproctitle-1.2.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d312a170f539895c8093b5e68ba126aa131c9f0d00f6360410db27ec50bf7afa"}, - {file = "setproctitle-1.2.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c93a2272740e60cddf59d3e1d35dbb89fcc3676f5ca9618bb4e6ae9633fdf13c"}, - {file = "setproctitle-1.2.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:76f59444a25fb42ca07f53a4474b1545d97a06f016e6c6b8246eee5b146820b5"}, - {file = "setproctitle-1.2.3-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06aab65e68163ead9d046b452dd9ad1fc6834ce6bde490f63fdce3be53e9cc73"}, - {file = "setproctitle-1.2.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:97accd117392b1e57e09888792750c403d7729b7e4b193005178b3736b325ea0"}, - {file = "setproctitle-1.2.3.tar.gz", hash = "sha256:ecf28b1c07a799d76f4326e508157b71aeda07b84b90368ea451c0710dbd32c0"}, -] -setuptools = [ - {file = "setuptools-65.5.1-py3-none-any.whl", hash = "sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31"}, - {file = "setuptools-65.5.1.tar.gz", hash = "sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f"}, -] -six = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] -sortedcontainers = [ - {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, - {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, -] -stack-data = [ - {file = "stack_data-0.3.0-py3-none-any.whl", hash = "sha256:aa1d52d14d09c7a9a12bb740e6bdfffe0f5e8f4f9218d85e7c73a8c37f7ae38d"}, - {file = "stack_data-0.3.0.tar.gz", hash = "sha256:77bec1402dcd0987e9022326473fdbcc767304892a533ed8c29888dacb7dddbc"}, -] -structlog = [ - {file = "structlog-21.5.0-py3-none-any.whl", hash = "sha256:fd7922e195262b337da85c2a91c84be94ccab1f8fd1957bd6986f6904e3761c8"}, - {file = "structlog-21.5.0.tar.gz", hash = "sha256:68c4c29c003714fe86834f347cb107452847ba52414390a7ee583472bde00fc9"}, -] -structlog-sentry = [ - {file = "structlog-sentry-1.4.0.tar.gz", hash = 
"sha256:5fc6cfab71b858d71433e68cc5af79a396e72015003931507e340b3687ebb0a8"}, - {file = "structlog_sentry-1.4.0-py3-none-any.whl", hash = "sha256:04627538e13bb0719a8806353279d40c1d1afb3eb2053817820754b9a08814a7"}, -] -tomli = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] -traitlets = [ - {file = "traitlets-5.2.0-py3-none-any.whl", hash = "sha256:9dd4025123fbe018a2092b2ad6984792f53ea3362c698f37473258b1fa97b0bc"}, - {file = "traitlets-5.2.0.tar.gz", hash = "sha256:60474f39bf1d39a11e0233090b99af3acee93bbc2281777e61dd8c87da8a0014"}, -] -twisted = [ - {file = "Twisted-22.4.0-py3-none-any.whl", hash = "sha256:f9f7a91f94932477a9fc3b169d57f54f96c6e74a23d78d9ce54039a7f48928a2"}, - {file = "Twisted-22.4.0.tar.gz", hash = "sha256:a047990f57dfae1e0bd2b7df2526d4f16dcdc843774dc108b78c52f2a5f13680"}, -] -twisted-iocpsupport = [ - {file = "twisted-iocpsupport-1.0.2.tar.gz", hash = "sha256:72068b206ee809c9c596b57b5287259ea41ddb4774d86725b19f35bf56aa32a9"}, - {file = "twisted_iocpsupport-1.0.2-cp310-cp310-win32.whl", hash = "sha256:985c06a33f5c0dae92c71a036d1ea63872ee86a21dd9b01e1f287486f15524b4"}, - {file = "twisted_iocpsupport-1.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:81b3abe3527b367da0220482820cb12a16c661672b7bcfcde328902890d63323"}, - {file = "twisted_iocpsupport-1.0.2-cp36-cp36m-win32.whl", hash = "sha256:9dbb8823b49f06d4de52721b47de4d3b3026064ef4788ce62b1a21c57c3fff6f"}, - {file = "twisted_iocpsupport-1.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:b9fed67cf0f951573f06d560ac2f10f2a4bbdc6697770113a2fc396ea2cb2565"}, - {file = "twisted_iocpsupport-1.0.2-cp37-cp37m-win32.whl", hash = "sha256:b76b4eed9b27fd63ddb0877efdd2d15835fdcb6baa745cb85b66e5d016ac2878"}, - {file = "twisted_iocpsupport-1.0.2-cp37-cp37m-win_amd64.whl", hash = 
"sha256:851b3735ca7e8102e661872390e3bce88f8901bece95c25a0c8bb9ecb8a23d32"}, - {file = "twisted_iocpsupport-1.0.2-cp38-cp38-win32.whl", hash = "sha256:bf4133139d77fc706d8f572e6b7d82871d82ec7ef25d685c2351bdacfb701415"}, - {file = "twisted_iocpsupport-1.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:306becd6e22ab6e8e4f36b6bdafd9c92e867c98a5ce517b27fdd27760ee7ae41"}, - {file = "twisted_iocpsupport-1.0.2-cp39-cp39-win32.whl", hash = "sha256:3c61742cb0bc6c1ac117a7e5f422c129832f0c295af49e01d8a6066df8cfc04d"}, - {file = "twisted_iocpsupport-1.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:b435857b9efcbfc12f8c326ef0383f26416272260455bbca2cd8d8eca470c546"}, - {file = "twisted_iocpsupport-1.0.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:7d972cfa8439bdcb35a7be78b7ef86d73b34b808c74be56dfa785c8a93b851bf"}, -] -txaio = [ - {file = "txaio-22.2.1-py2.py3-none-any.whl", hash = "sha256:41223af4a9d5726e645a8ee82480f413e5e300dd257db94bc38ae12ea48fb2e5"}, - {file = "txaio-22.2.1.tar.gz", hash = "sha256:2e4582b70f04b2345908254684a984206c0d9b50e3074a24a4c55aba21d24d01"}, -] -types-cryptography = [ - {file = "types-cryptography-3.3.21.tar.gz", hash = "sha256:ad1b9c63159c009f8676c7e41a4d595dfb96e8c03affa2e693e1617908bb409e"}, - {file = "types_cryptography-3.3.21-py3-none-any.whl", hash = "sha256:bdeb6dd07280ac724e05f02e0d8ef01fdef729b18bb07d635d64de83171a4e70"}, -] -types-pyopenssl = [ - {file = "types-pyOpenSSL-22.0.3.tar.gz", hash = "sha256:374e3d828017f31be1ce93e2e839208222f2f71447995a9e26979789b9aa2598"}, - {file = "types_pyOpenSSL-22.0.3-py3-none-any.whl", hash = "sha256:cfa34b5cb57c2e4336a5f5b388cbab5abfe150bca66a6fa17104e4e3a252ddf8"}, -] -types-requests = [ - {file = "types-requests-2.27.25.tar.gz", hash = "sha256:805ae7e38fd9d157153066dc4381cf585fd34dfa212f2fc1fece248c05aac571"}, - {file = "types_requests-2.27.25-py3-none-any.whl", hash = "sha256:2444905c89731dbcb6bbcd6d873a04252445df7623917c640e463b2b28d2a708"}, -] -types-urllib3 = [ - {file = "types-urllib3-1.26.14.tar.gz", hash 
= "sha256:2a2578e4b36341ccd240b00fccda9826988ff0589a44ba4a664bbd69ef348d27"}, - {file = "types_urllib3-1.26.14-py3-none-any.whl", hash = "sha256:5d2388aa76395b1e3999ff789ea5b3283677dad8e9bcf3d9117ba19271fd35d9"}, -] -typing-extensions = [ - {file = "typing_extensions-4.2.0-py3-none-any.whl", hash = "sha256:6657594ee297170d19f67d55c05852a874e7eb634f4f753dbd667855e07c1708"}, - {file = "typing_extensions-4.2.0.tar.gz", hash = "sha256:f1c24655a0da0d1b67f07e17a5e6b2a105894e6824b92096378bb3668ef02376"}, -] -urllib3 = [ - {file = "urllib3-1.26.9-py2.py3-none-any.whl", hash = "sha256:44ece4d53fb1706f667c9bd1c648f5469a2ec925fcf3a776667042d645472c14"}, - {file = "urllib3-1.26.9.tar.gz", hash = "sha256:aabaf16477806a5e1dd19aa41f8c2b7950dd3c746362d7e3223dbe6de6ac448e"}, -] -wcwidth = [ - {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, - {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, -] -yarl = [ - {file = "yarl-1.7.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f2a8508f7350512434e41065684076f640ecce176d262a7d54f0da41d99c5a95"}, - {file = "yarl-1.7.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da6df107b9ccfe52d3a48165e48d72db0eca3e3029b5b8cb4fe6ee3cb870ba8b"}, - {file = "yarl-1.7.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a1d0894f238763717bdcfea74558c94e3bc34aeacd3351d769460c1a586a8b05"}, - {file = "yarl-1.7.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfe4b95b7e00c6635a72e2d00b478e8a28bfb122dc76349a06e20792eb53a523"}, - {file = "yarl-1.7.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c145ab54702334c42237a6c6c4cc08703b6aa9b94e2f227ceb3d477d20c36c63"}, - {file = "yarl-1.7.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ca56f002eaf7998b5fcf73b2421790da9d2586331805f38acd9997743114e98"}, - {file = 
"yarl-1.7.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1d3d5ad8ea96bd6d643d80c7b8d5977b4e2fb1bab6c9da7322616fd26203d125"}, - {file = "yarl-1.7.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:167ab7f64e409e9bdd99333fe8c67b5574a1f0495dcfd905bc7454e766729b9e"}, - {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:95a1873b6c0dd1c437fb3bb4a4aaa699a48c218ac7ca1e74b0bee0ab16c7d60d"}, - {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6152224d0a1eb254f97df3997d79dadd8bb2c1a02ef283dbb34b97d4f8492d23"}, - {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:5bb7d54b8f61ba6eee541fba4b83d22b8a046b4ef4d8eb7f15a7e35db2e1e245"}, - {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:9c1f083e7e71b2dd01f7cd7434a5f88c15213194df38bc29b388ccdf1492b739"}, - {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f44477ae29025d8ea87ec308539f95963ffdc31a82f42ca9deecf2d505242e72"}, - {file = "yarl-1.7.2-cp310-cp310-win32.whl", hash = "sha256:cff3ba513db55cc6a35076f32c4cdc27032bd075c9faef31fec749e64b45d26c"}, - {file = "yarl-1.7.2-cp310-cp310-win_amd64.whl", hash = "sha256:c9c6d927e098c2d360695f2e9d38870b2e92e0919be07dbe339aefa32a090265"}, - {file = "yarl-1.7.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9b4c77d92d56a4c5027572752aa35082e40c561eec776048330d2907aead891d"}, - {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c01a89a44bb672c38f42b49cdb0ad667b116d731b3f4c896f72302ff77d71656"}, - {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c19324a1c5399b602f3b6e7db9478e5b1adf5cf58901996fc973fe4fccd73eed"}, - {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:3abddf0b8e41445426d29f955b24aeecc83fa1072be1be4e0d194134a7d9baee"}, - {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6a1a9fe17621af43e9b9fcea8bd088ba682c8192d744b386ee3c47b56eaabb2c"}, - {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8b0915ee85150963a9504c10de4e4729ae700af11df0dc5550e6587ed7891e92"}, - {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:29e0656d5497733dcddc21797da5a2ab990c0cb9719f1f969e58a4abac66234d"}, - {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:bf19725fec28452474d9887a128e98dd67eee7b7d52e932e6949c532d820dc3b"}, - {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:d6f3d62e16c10e88d2168ba2d065aa374e3c538998ed04996cd373ff2036d64c"}, - {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ac10bbac36cd89eac19f4e51c032ba6b412b3892b685076f4acd2de18ca990aa"}, - {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:aa32aaa97d8b2ed4e54dc65d241a0da1c627454950f7d7b1f95b13985afd6c5d"}, - {file = "yarl-1.7.2-cp36-cp36m-win32.whl", hash = "sha256:87f6e082bce21464857ba58b569370e7b547d239ca22248be68ea5d6b51464a1"}, - {file = "yarl-1.7.2-cp36-cp36m-win_amd64.whl", hash = "sha256:ac35ccde589ab6a1870a484ed136d49a26bcd06b6a1c6397b1967ca13ceb3913"}, - {file = "yarl-1.7.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a467a431a0817a292121c13cbe637348b546e6ef47ca14a790aa2fa8cc93df63"}, - {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ab0c3274d0a846840bf6c27d2c60ba771a12e4d7586bf550eefc2df0b56b3b4"}, - {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d260d4dc495c05d6600264a197d9d6f7fc9347f21d2594926202fd08cf89a8ba"}, - {file = 
"yarl-1.7.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fc4dd8b01a8112809e6b636b00f487846956402834a7fd59d46d4f4267181c41"}, - {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c1164a2eac148d85bbdd23e07dfcc930f2e633220f3eb3c3e2a25f6148c2819e"}, - {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:67e94028817defe5e705079b10a8438b8cb56e7115fa01640e9c0bb3edf67332"}, - {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:89ccbf58e6a0ab89d487c92a490cb5660d06c3a47ca08872859672f9c511fc52"}, - {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:8cce6f9fa3df25f55521fbb5c7e4a736683148bcc0c75b21863789e5185f9185"}, - {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:211fcd65c58bf250fb994b53bc45a442ddc9f441f6fec53e65de8cba48ded986"}, - {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c10ea1e80a697cf7d80d1ed414b5cb8f1eec07d618f54637067ae3c0334133c4"}, - {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:52690eb521d690ab041c3919666bea13ab9fbff80d615ec16fa81a297131276b"}, - {file = "yarl-1.7.2-cp37-cp37m-win32.whl", hash = "sha256:695ba021a9e04418507fa930d5f0704edbce47076bdcfeeaba1c83683e5649d1"}, - {file = "yarl-1.7.2-cp37-cp37m-win_amd64.whl", hash = "sha256:c17965ff3706beedafd458c452bf15bac693ecd146a60a06a214614dc097a271"}, - {file = "yarl-1.7.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fce78593346c014d0d986b7ebc80d782b7f5e19843ca798ed62f8e3ba8728576"}, - {file = "yarl-1.7.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c2a1ac41a6aa980db03d098a5531f13985edcb451bcd9d00670b03129922cd0d"}, - {file = "yarl-1.7.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:39d5493c5ecd75c8093fa7700a2fb5c94fe28c839c8e40144b7ab7ccba6938c8"}, - {file = 
"yarl-1.7.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1eb6480ef366d75b54c68164094a6a560c247370a68c02dddb11f20c4c6d3c9d"}, - {file = "yarl-1.7.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ba63585a89c9885f18331a55d25fe81dc2d82b71311ff8bd378fc8004202ff6"}, - {file = "yarl-1.7.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e39378894ee6ae9f555ae2de332d513a5763276a9265f8e7cbaeb1b1ee74623a"}, - {file = "yarl-1.7.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c0910c6b6c31359d2f6184828888c983d54d09d581a4a23547a35f1d0b9484b1"}, - {file = "yarl-1.7.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6feca8b6bfb9eef6ee057628e71e1734caf520a907b6ec0d62839e8293e945c0"}, - {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8300401dc88cad23f5b4e4c1226f44a5aa696436a4026e456fe0e5d2f7f486e6"}, - {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:788713c2896f426a4e166b11f4ec538b5736294ebf7d5f654ae445fd44270832"}, - {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:fd547ec596d90c8676e369dd8a581a21227fe9b4ad37d0dc7feb4ccf544c2d59"}, - {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:737e401cd0c493f7e3dd4db72aca11cfe069531c9761b8ea474926936b3c57c8"}, - {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:baf81561f2972fb895e7844882898bda1eef4b07b5b385bcd308d2098f1a767b"}, - {file = "yarl-1.7.2-cp38-cp38-win32.whl", hash = "sha256:ede3b46cdb719c794427dcce9d8beb4abe8b9aa1e97526cc20de9bd6583ad1ef"}, - {file = "yarl-1.7.2-cp38-cp38-win_amd64.whl", hash = "sha256:cc8b7a7254c0fc3187d43d6cb54b5032d2365efd1df0cd1749c0c4df5f0ad45f"}, - {file = "yarl-1.7.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:580c1f15500e137a8c37053e4cbf6058944d4c114701fa59944607505c2fe3a0"}, - 
{file = "yarl-1.7.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3ec1d9a0d7780416e657f1e405ba35ec1ba453a4f1511eb8b9fbab81cb8b3ce1"}, - {file = "yarl-1.7.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3bf8cfe8856708ede6a73907bf0501f2dc4e104085e070a41f5d88e7faf237f3"}, - {file = "yarl-1.7.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1be4bbb3d27a4e9aa5f3df2ab61e3701ce8fcbd3e9846dbce7c033a7e8136746"}, - {file = "yarl-1.7.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:534b047277a9a19d858cde163aba93f3e1677d5acd92f7d10ace419d478540de"}, - {file = "yarl-1.7.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6ddcd80d79c96eb19c354d9dca95291589c5954099836b7c8d29278a7ec0bda"}, - {file = "yarl-1.7.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9bfcd43c65fbb339dc7086b5315750efa42a34eefad0256ba114cd8ad3896f4b"}, - {file = "yarl-1.7.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f64394bd7ceef1237cc604b5a89bf748c95982a84bcd3c4bbeb40f685c810794"}, - {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044daf3012e43d4b3538562da94a88fb12a6490652dbc29fb19adfa02cf72eac"}, - {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:368bcf400247318382cc150aaa632582d0780b28ee6053cd80268c7e72796dec"}, - {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:bab827163113177aee910adb1f48ff7af31ee0289f434f7e22d10baf624a6dfe"}, - {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0cba38120db72123db7c58322fa69e3c0efa933040ffb586c3a87c063ec7cae8"}, - {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:59218fef177296451b23214c91ea3aba7858b4ae3306dde120224cfe0f7a6ee8"}, - {file = "yarl-1.7.2-cp39-cp39-win32.whl", hash = 
"sha256:1edc172dcca3f11b38a9d5c7505c83c1913c0addc99cd28e993efeaafdfaa18d"}, - {file = "yarl-1.7.2-cp39-cp39-win_amd64.whl", hash = "sha256:797c2c412b04403d2da075fb93c123df35239cd7b4cc4e0cd9e5839b73f52c58"}, - {file = "yarl-1.7.2.tar.gz", hash = "sha256:45399b46d60c253327a460e99856752009fcee5f5d3c80b2f7c0cae1c38d56dd"}, -] -"zope.event" = [ - {file = "zope.event-4.5.0-py2.py3-none-any.whl", hash = "sha256:2666401939cdaa5f4e0c08cf7f20c9b21423b95e88f4675b1443973bdb080c42"}, - {file = "zope.event-4.5.0.tar.gz", hash = "sha256:5e76517f5b9b119acf37ca8819781db6c16ea433f7e2062c4afc2b6fbedb1330"}, -] -"zope.interface" = [ - {file = "zope.interface-5.4.0-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:7df1e1c05304f26faa49fa752a8c690126cf98b40b91d54e6e9cc3b7d6ffe8b7"}, - {file = "zope.interface-5.4.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:2c98384b254b37ce50eddd55db8d381a5c53b4c10ee66e1e7fe749824f894021"}, - {file = "zope.interface-5.4.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:08f9636e99a9d5410181ba0729e0408d3d8748026ea938f3b970a0249daa8192"}, - {file = "zope.interface-5.4.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:0ea1d73b7c9dcbc5080bb8aaffb776f1c68e807767069b9ccdd06f27a161914a"}, - {file = "zope.interface-5.4.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:273f158fabc5ea33cbc936da0ab3d4ba80ede5351babc4f577d768e057651531"}, - {file = "zope.interface-5.4.0-cp27-cp27m-win32.whl", hash = "sha256:a1e6e96217a0f72e2b8629e271e1b280c6fa3fe6e59fa8f6701bec14e3354325"}, - {file = "zope.interface-5.4.0-cp27-cp27m-win_amd64.whl", hash = "sha256:877473e675fdcc113c138813a5dd440da0769a2d81f4d86614e5d62b69497155"}, - {file = "zope.interface-5.4.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:f7ee479e96f7ee350db1cf24afa5685a5899e2b34992fb99e1f7c1b0b758d263"}, - {file = "zope.interface-5.4.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:b0297b1e05fd128d26cc2460c810d42e205d16d76799526dfa8c8ccd50e74959"}, - {file = 
"zope.interface-5.4.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:af310ec8335016b5e52cae60cda4a4f2a60a788cbb949a4fbea13d441aa5a09e"}, - {file = "zope.interface-5.4.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:9a9845c4c6bb56e508651f005c4aeb0404e518c6f000d5a1123ab077ab769f5c"}, - {file = "zope.interface-5.4.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:0b465ae0962d49c68aa9733ba92a001b2a0933c317780435f00be7ecb959c702"}, - {file = "zope.interface-5.4.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:5dd9ca406499444f4c8299f803d4a14edf7890ecc595c8b1c7115c2342cadc5f"}, - {file = "zope.interface-5.4.0-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:469e2407e0fe9880ac690a3666f03eb4c3c444411a5a5fddfdabc5d184a79f05"}, - {file = "zope.interface-5.4.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:52de7fc6c21b419078008f697fd4103dbc763288b1406b4562554bd47514c004"}, - {file = "zope.interface-5.4.0-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:3dd4952748521205697bc2802e4afac5ed4b02909bb799ba1fe239f77fd4e117"}, - {file = "zope.interface-5.4.0-cp35-cp35m-win32.whl", hash = "sha256:dd93ea5c0c7f3e25335ab7d22a507b1dc43976e1345508f845efc573d3d779d8"}, - {file = "zope.interface-5.4.0-cp35-cp35m-win_amd64.whl", hash = "sha256:3748fac0d0f6a304e674955ab1365d515993b3a0a865e16a11ec9d86fb307f63"}, - {file = "zope.interface-5.4.0-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:66c0061c91b3b9cf542131148ef7ecbecb2690d48d1612ec386de9d36766058f"}, - {file = "zope.interface-5.4.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:d0c1bc2fa9a7285719e5678584f6b92572a5b639d0e471bb8d4b650a1a910920"}, - {file = "zope.interface-5.4.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2876246527c91e101184f63ccd1d716ec9c46519cc5f3d5375a3351c46467c46"}, - {file = "zope.interface-5.4.0-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:334701327f37c47fa628fc8b8d28c7d7730ce7daaf4bda1efb741679c2b087fc"}, - {file = 
"zope.interface-5.4.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:71aace0c42d53abe6fc7f726c5d3b60d90f3c5c055a447950ad6ea9cec2e37d9"}, - {file = "zope.interface-5.4.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:5bb3489b4558e49ad2c5118137cfeaf59434f9737fa9c5deefc72d22c23822e2"}, - {file = "zope.interface-5.4.0-cp36-cp36m-win32.whl", hash = "sha256:1c0e316c9add0db48a5b703833881351444398b04111188069a26a61cfb4df78"}, - {file = "zope.interface-5.4.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6f0c02cbb9691b7c91d5009108f975f8ffeab5dff8f26d62e21c493060eff2a1"}, - {file = "zope.interface-5.4.0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:7d97a4306898b05404a0dcdc32d9709b7d8832c0c542b861d9a826301719794e"}, - {file = "zope.interface-5.4.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:867a5ad16892bf20e6c4ea2aab1971f45645ff3102ad29bd84c86027fa99997b"}, - {file = "zope.interface-5.4.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5f931a1c21dfa7a9c573ec1f50a31135ccce84e32507c54e1ea404894c5eb96f"}, - {file = "zope.interface-5.4.0-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:194d0bcb1374ac3e1e023961610dc8f2c78a0f5f634d0c737691e215569e640d"}, - {file = "zope.interface-5.4.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:8270252effc60b9642b423189a2fe90eb6b59e87cbee54549db3f5562ff8d1b8"}, - {file = "zope.interface-5.4.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:15e7d1f7a6ee16572e21e3576d2012b2778cbacf75eb4b7400be37455f5ca8bf"}, - {file = "zope.interface-5.4.0-cp37-cp37m-win32.whl", hash = "sha256:8892f89999ffd992208754851e5a052f6b5db70a1e3f7d54b17c5211e37a98c7"}, - {file = "zope.interface-5.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2e5a26f16503be6c826abca904e45f1a44ff275fdb7e9d1b75c10671c26f8b94"}, - {file = "zope.interface-5.4.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:0f91b5b948686659a8e28b728ff5e74b1be6bf40cb04704453617e5f1e945ef3"}, - {file = "zope.interface-5.4.0-cp38-cp38-manylinux1_i686.whl", hash = 
"sha256:4de4bc9b6d35c5af65b454d3e9bc98c50eb3960d5a3762c9438df57427134b8e"}, - {file = "zope.interface-5.4.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:bf68f4b2b6683e52bec69273562df15af352e5ed25d1b6641e7efddc5951d1a7"}, - {file = "zope.interface-5.4.0-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:63b82bb63de7c821428d513607e84c6d97d58afd1fe2eb645030bdc185440120"}, - {file = "zope.interface-5.4.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:db1fa631737dab9fa0b37f3979d8d2631e348c3b4e8325d6873c2541d0ae5a48"}, - {file = "zope.interface-5.4.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:f44e517131a98f7a76696a7b21b164bcb85291cee106a23beccce454e1f433a4"}, - {file = "zope.interface-5.4.0-cp38-cp38-win32.whl", hash = "sha256:a9506a7e80bcf6eacfff7f804c0ad5350c8c95b9010e4356a4b36f5322f09abb"}, - {file = "zope.interface-5.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:3c02411a3b62668200910090a0dff17c0b25aaa36145082a5a6adf08fa281e54"}, - {file = "zope.interface-5.4.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:0cee5187b60ed26d56eb2960136288ce91bcf61e2a9405660d271d1f122a69a4"}, - {file = "zope.interface-5.4.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:a8156e6a7f5e2a0ff0c5b21d6bcb45145efece1909efcbbbf48c56f8da68221d"}, - {file = "zope.interface-5.4.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:205e40ccde0f37496904572035deea747390a8b7dc65146d30b96e2dd1359a83"}, - {file = "zope.interface-5.4.0-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:3f24df7124c323fceb53ff6168da70dbfbae1442b4f3da439cd441681f54fe25"}, - {file = "zope.interface-5.4.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:5208ebd5152e040640518a77827bdfcc73773a15a33d6644015b763b9c9febc1"}, - {file = "zope.interface-5.4.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:17776ecd3a1fdd2b2cd5373e5ef8b307162f581c693575ec62e7c5399d80794c"}, - {file = "zope.interface-5.4.0-cp39-cp39-win32.whl", hash = "sha256:d4d9d6c1a455d4babd320203b918ccc7fcbefe308615c521062bc2ba1aa4d26e"}, - 
{file = "zope.interface-5.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:0cba8477e300d64a11a9789ed40ee8932b59f9ee05f85276dbb4b59acee5dd09"}, - {file = "zope.interface-5.4.0.tar.gz", hash = "sha256:5dba5f530fec3f0988d83b78cc591b58c0b6eb8431a85edd1569a0539a8a5a0e"}, -] -"zope.schema" = [ - {file = "zope.schema-6.2.0-py2.py3-none-any.whl", hash = "sha256:03150d8670549590b45109e06b7b964f4e751fa9cb5297ec4985c3bc38641b07"}, - {file = "zope.schema-6.2.0.tar.gz", hash = "sha256:2201aef8ad75ee5a881284d7a6acd384661d6dca7bde5e80a22839a77124595b"}, -] +lock-version = "2.0" +python-versions = ">=3.8.1,<4" +content-hash = "d1b70ed0381b8680d8c4ebf91fca2519fdc51ecb71b32833efac6747cc7ce183" diff --git a/pyproject.toml b/pyproject.toml index f99169118..ac9e82044 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,7 +14,7 @@ [tool.poetry] name = "hathor" -version = "0.52.3" +version = "0.53.0" description = "Hathor Network full-node" authors = ["Hathor Team "] license = "Apache-2.0" @@ -26,6 +26,7 @@ classifiers = [ "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", "Operating System :: OS Independent", "License :: OSI Approved :: Apache Software License", "Private :: Do Not Upload", @@ -36,47 +37,46 @@ exclude = ["tests", "tests.*"] hathor-cli = 'hathor.cli.main:main' [tool.poetry.dev-dependencies] -flake8 = "~4.0.1" +flake8 = "~6.0.0" isort = {version = "~5.10.1", extras = ["colors"]} -mypy = {version = "^0.950", markers = "implementation_name == 'cpython'"} -mypy-zope = {version = "^0.3", markers = "implementation_name == 'cpython'"} -pytest = "~7.1.2" -pytest-cov = "~3.0.0" +mypy = {version = "^1.0.0", markers = "implementation_name == 'cpython'"} +mypy-zope = {version = "^0.9.0", markers = "implementation_name == 'cpython'"} +pytest = "~7.2.0" +pytest-cov = "~4.0.0" flaky = "~3.7.0" +pytest-xdist = "~3.2.0" # stubs: -types-requests = "=2.27.25" -types-pyopenssl 
= "=22.0.3" +types-requests = "=2.28.11.4" +types-pyopenssl = "=22.1.0.2" [tool.poetry.dependencies] -python = ">=3.8,<4" -twisted = "~22.4.0" -autobahn = "~22.4.2" +python = ">=3.8.1,<4" +twisted = "~22.10.0" +autobahn = "~22.7.1" base58 = "~2.1.1" -colorama = "~0.4.4" +colorama = "~0.4.6" configargparse = "~1.5.3" cryptography = "~38.0.3" -graphviz = "~0.20" -ipython = "~8.4.0" +graphviz = "~0.20.1" +ipython = "~8.7.0" mnemonic = "~0.20" -prometheus_client = "~0.14.1" -pyopenssl = "=22.0.0" +prometheus_client = "~0.15.0" +pyopenssl = "=22.1.0" pycoin = "~0.92" -pywin32 = {version = "304", markers = "sys_platform == 'win32'"} -requests = "=2.27.1" +pywin32 = {version = "305", markers = "sys_platform == 'win32'"} +requests = "=2.28.1" service_identity = "~21.1.0" pexpect = "~4.8.0" intervaltree = "~3.1.0" -structlog = "~21.5.0" +structlog = "~22.3.0" rocksdb = {git = "https://github.com/hathornetwork/python-rocksdb.git", markers = "sys_platform != 'win32'"} -aiohttp = "~3.8.1" -idna = "~3.3" +aiohttp = "~3.8.3" +idna = "~3.4" setproctitle = "^1.2.2" sentry-sdk = {version = "^1.5.11", optional = true} structlog-sentry = {version = "^1.4.0", optional = true} -hathorlib = "0.2.0" -# move the following to "build-system.requires" when this poetry pr is merged and released: https://github.com/python-poetry/poetry/pull/2794 -# needed to build python-rocksdb -cython = "<0.30" +hathorlib = "0.3.0" +pydantic = "~1.10.6" [tool.poetry.extras] sentry = ["sentry-sdk", "structlog-sentry"] @@ -99,7 +99,10 @@ warn_unused_ignores = true namespace_packages = true show_error_codes = true show_error_context = true -plugins = ["mypy_zope:plugin"] +plugins = [ + "pydantic.mypy", + "mypy_zope:plugin" +] # these are modules which we don't have type stubs for [[tool.mypy.overrides]] @@ -124,13 +127,19 @@ module = [ ] ignore_missing_imports = true +[tool.pydantic-mypy] +init_typed = true +init_forbid_extra = true +warn_untyped_fields = true + [tool.pytest.ini_options] minversion = "6.0" 
testpaths = ["tests"] +addopts = "-n auto" markers = [ "slow", ] [build-system] -requires = ["poetry-core"] +requires = ["poetry-core >= 1.3.2", "cython < 0.30"] build-backend = "poetry.core.masonry.api" diff --git a/tests/consensus/test_soft_voided2.py b/tests/consensus/test_soft_voided2.py index 9628fdb04..67f24d1d4 100644 --- a/tests/consensus/test_soft_voided2.py +++ b/tests/consensus/test_soft_voided2.py @@ -56,8 +56,8 @@ def do_step(self, i, manager1, tx_base): self.graphviz.labels[txF1.hash] = f'txF1-{i}' if not self.skip_asserts: - self.assertIn(txF1.hash, manager1.soft_voided_tx_ids) - self.assertIn(txF2.hash, manager1.soft_voided_tx_ids) + self.assertIn(txF1.hash, manager1.consensus_algorithm.soft_voided_tx_ids) + self.assertIn(txF2.hash, manager1.consensus_algorithm.soft_voided_tx_ids) txG = add_custom_tx(manager1, [(txF2, 0)], base_parent=tx_base, address=address) self.graphviz.labels[txG.hash] = f'txG-{i}' diff --git a/tests/event/test_base_event.py b/tests/event/test_base_event.py new file mode 100644 index 000000000..90b0b3e76 --- /dev/null +++ b/tests/event/test_base_event.py @@ -0,0 +1,112 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import pytest +from pydantic import ValidationError + +from hathor.event.model.base_event import BaseEvent +from hathor.event.model.event_data import ReorgData +from hathor.event.model.event_type import EventType +from tests.utils import EventMocker + + +@pytest.mark.parametrize('event_id', [0, 1, 1000]) +@pytest.mark.parametrize('group_id', [None, 0, 1, 1000]) +def test_create_base_event(event_id, group_id): + event = BaseEvent( + peer_id='some_peer', + id=event_id, + timestamp=123.3, + type=EventType.VERTEX_METADATA_CHANGED, + data=EventMocker.tx_data, + group_id=group_id + ) + + expected = dict( + peer_id='some_peer', + id=event_id, + timestamp=123.3, + type='VERTEX_METADATA_CHANGED', + data=dict( + hash='abc', + nonce=123, + timestamp=456, + version=1, + weight=10.0, + inputs=[], + outputs=[], + parents=[], + token_name=None, + token_symbol=None, + tokens=[], + metadata=dict( + hash='abc', + spent_outputs=[], + conflict_with=[], + first_block=None, + voided_by=[], + received_by=[], + children=[], + twins=[], + accumulated_weight=10.0, + score=20.0, + height=100, + validation='validation' + ) + ), + group_id=group_id + ) + + assert event.dict() == expected + + +@pytest.mark.parametrize('event_id', [None, -1, -1000]) +def test_create_base_event_fail_id(event_id): + with pytest.raises(ValidationError): + BaseEvent( + peer_id='some_peer', + id=event_id, + timestamp=123.3, + type=EventType.VERTEX_METADATA_CHANGED, + data=EventMocker.tx_data, + ) + + +@pytest.mark.parametrize('group_id', [-1, -1000]) +def test_create_base_event_fail_group_id(group_id): + with pytest.raises(ValidationError): + BaseEvent( + peer_id='some_peer', + id=0, + timestamp=123.3, + type=EventType.VERTEX_METADATA_CHANGED, + data=EventMocker.tx_data, + group_id=group_id + ) + + +def test_create_base_event_fail_data_type(): + with pytest.raises(ValidationError): + BaseEvent( + peer_id='some_peer', + id=0, + timestamp=123.3, + type=EventType.VERTEX_METADATA_CHANGED, + data=ReorgData( + 
reorg_size=10, + previous_best_block='a', + new_best_block='b', + common_block='c' + ), + ) diff --git a/tests/event/test_event_manager.py b/tests/event/test_event_manager.py index 302a6d976..1b4f7250f 100644 --- a/tests/event/test_event_manager.py +++ b/tests/event/test_event_manager.py @@ -1,4 +1,8 @@ +from unittest.mock import Mock + +from hathor.event.model.event_type import EventType from hathor.event.storage.memory_storage import EventMemoryStorage +from hathor.event.websocket import EventWebsocketFactory from hathor.pubsub import HathorEvents from tests import unittest @@ -8,69 +12,79 @@ class BaseEventManagerTest(unittest.TestCase): def setUp(self): super().setUp() - self.event_storage = EventMemoryStorage() self.network = 'testnet' - self.manager = self.create_peer(self.network, event_storage=self.event_storage) - self.event_manager = self.manager.event_manager + self.event_storage = EventMemoryStorage() + self.manager = self.create_peer( + self.network, + event_ws_factory=Mock(spec_set=EventWebsocketFactory), + full_verification=False, + event_storage=self.event_storage + ) def test_if_event_is_persisted(self): - self.manager.pubsub.publish(HathorEvents.NETWORK_BEST_BLOCK_FOUND, - event={"test": "test1"}) + block = self.manager.tx_storage.get_best_block() + self.manager.pubsub.publish(HathorEvents.NETWORK_NEW_TX_ACCEPTED, tx=block) self.run_to_completion() self.assertIsNotNone(self.event_storage.get_event(0)) + def _fake_reorg_started(self): + block = self.manager.tx_storage.get_best_block() + # XXX: since we're faking these events, they don't neet to be consistent + self.manager.pubsub.publish(HathorEvents.REORG_STARTED, old_best_height=1, old_best_block=block, + new_best_height=1, new_best_block=block, reorg_size=1, common_block=block) + + def _fake_reorg_finished(self): + self.manager.pubsub.publish(HathorEvents.REORG_FINISHED) + def test_event_group(self): - self.manager.pubsub.publish(HathorEvents.REORG_STARTED, - event={"test": "test1"}) - 
self.manager.pubsub.publish(HathorEvents.REORG_FINISHED, - event={"test": "test2"}) - self.manager.pubsub.publish(HathorEvents.REORG_STARTED, - event={"test": "test3"}) - self.manager.pubsub.publish(HathorEvents.REORG_FINISHED, - event={"test": "test4"}) + self._fake_reorg_started() + self._fake_reorg_finished() + self._fake_reorg_started() + self._fake_reorg_finished() self.run_to_completion() - event1 = self.event_storage.get_event(0) - event2 = self.event_storage.get_event(1) - event3 = self.event_storage.get_event(2) - event4 = self.event_storage.get_event(3) - self.assertEqual(HathorEvents(event1.type), HathorEvents.REORG_STARTED) + event0 = self.event_storage.get_event(0) + event1 = self.event_storage.get_event(1) + event2 = self.event_storage.get_event(2) + event3 = self.event_storage.get_event(3) + event4 = self.event_storage.get_event(4) + self.assertEqual(EventType(event0.type), EventType.LOAD_FINISHED) + self.assertEqual(EventType(event1.type), EventType.REORG_STARTED) self.assertIsNotNone(event1.group_id) - self.assertEqual(HathorEvents(event2.type), HathorEvents.REORG_FINISHED) + self.assertEqual(EventType(event2.type), EventType.REORG_FINISHED) self.assertIsNotNone(event2.group_id) self.assertEqual(event1.group_id, event2.group_id) self.assertNotEqual(event2.group_id, event3.group_id) self.assertEqual(event3.group_id, event4.group_id) def test_cannot_start_group_twice(self): - self.manager.pubsub.publish(HathorEvents.REORG_STARTED, - event={"test": "test1"}) + self._fake_reorg_started() self.run_to_completion() with self.assertRaises(AssertionError): - self.manager.pubsub.publish(HathorEvents.REORG_STARTED, - event={"test": "test1"}) + self._fake_reorg_started() self.run_to_completion() def test_cannot_finish_group_that_was_not_started(self): with self.assertRaises(AssertionError): - self.manager.pubsub.publish(HathorEvents.REORG_FINISHED, - event={"test": "test1"}) + self._fake_reorg_finished() self.run_to_completion() def 
test_cannot_finish_group_twice(self): - self.manager.pubsub.publish(HathorEvents.REORG_STARTED, - event={"test": "test1"}) - self.manager.pubsub.publish(HathorEvents.REORG_FINISHED, - event={"test": "test2"}) + self._fake_reorg_started() + self._fake_reorg_finished() self.run_to_completion() with self.assertRaises(AssertionError): - self.manager.pubsub.publish(HathorEvents.REORG_FINISHED, - event={"test": "test3"}) + self._fake_reorg_finished() self.run_to_completion() -class EventManagerWithSyncV1(unittest.SyncV1Params, BaseEventManagerTest): +class SyncV1EventManager(unittest.SyncV1Params, BaseEventManagerTest): __test__ = True -class EventManagerWithSyncV2(unittest.SyncV1Params, BaseEventManagerTest): +class SyncV2EventManager(unittest.SyncV1Params, BaseEventManagerTest): __test__ = True + + +# sync-bridge should behave like sync-v2 +class SyncBridgeEventManagerTest(unittest.SyncBridgeParams, SyncV2EventManager): + pass diff --git a/tests/event/test_event_reorg.py b/tests/event/test_event_reorg.py new file mode 100644 index 000000000..fa69a4fac --- /dev/null +++ b/tests/event/test_event_reorg.py @@ -0,0 +1,130 @@ +from unittest.mock import Mock + +from hathor.conf import HathorSettings +from hathor.event.model.event_type import EventType +from hathor.event.storage import EventMemoryStorage +from hathor.event.websocket import EventWebsocketFactory +from tests import unittest +from tests.utils import add_new_blocks, get_genesis_key, zip_chunkify + +settings = HathorSettings() + + +class BaseEventReorgTest(unittest.TestCase): + __test__ = False + + def setUp(self): + super().setUp() + self.network = 'testnet' + self.event_storage = EventMemoryStorage() + self.manager = self.create_peer( + self.network, + event_ws_factory=Mock(spec_set=EventWebsocketFactory), + full_verification=False, + event_storage=self.event_storage + ) + + # read genesis keys + self.genesis_private_key = get_genesis_key() + self.genesis_public_key = self.genesis_private_key.public_key() + + def 
test_reorg_events(self): + assert settings.REWARD_SPEND_MIN_BLOCKS == 10, 'this test was made with this hardcoded value in mind' + + # add some blocks + blocks = add_new_blocks(self.manager, settings.REWARD_SPEND_MIN_BLOCKS, advance_clock=1) + + # make a re-org + self.log.debug('make reorg block') + block_to_replace = blocks[8] + tb0 = self.manager.make_custom_block_template(block_to_replace.parents[0], block_to_replace.parents[1:]) + b0 = tb0.generate_mining_block(self.manager.rng, storage=self.manager.tx_storage) + b0.weight = 10 + b0.resolve() + b0.verify() + self.manager.propagate_tx(b0, fails_silently=False) + self.log.debug('reorg block propagated') + self.run_to_completion() + + # check events + actual_events = list(self.event_storage.iter_from_event(0)) + + # events are separated into portions that are sorted (indicated by using lists) and portions that are unsorted + # (indicated by using a custom class), the unsorted parts mean that the given events must be present, but not + # necessarily in the given order, to check that we sort both the expected and actual events by tx hash to be + # able to match them, but only for the "unsorted" portions will, for the "sorted" portions the order is + # expected to be the given one + class unsorted(list): + pass + expected_events_grouped = [ + [ + (EventType.LOAD_FINISHED, {}) + ], + # XXX: the order of the following events can vary depending on which genesis is spent/confirmed first + unsorted([ + (EventType.VERTEX_METADATA_CHANGED, {'hash': settings.GENESIS_TX1_HASH.hex()}), + (EventType.VERTEX_METADATA_CHANGED, {'hash': settings.GENESIS_TX2_HASH.hex()}), + (EventType.VERTEX_METADATA_CHANGED, {'hash': blocks[0].hash_hex}), + ]), + # XXX: these events must always have this order + [ + (EventType.NEW_VERTEX_ACCEPTED, {'hash': blocks[0].hash_hex}), + (EventType.VERTEX_METADATA_CHANGED, {'hash': blocks[1].hash_hex}), + (EventType.NEW_VERTEX_ACCEPTED, {'hash': blocks[1].hash_hex}), + (EventType.VERTEX_METADATA_CHANGED, 
{'hash': blocks[2].hash_hex}), + (EventType.NEW_VERTEX_ACCEPTED, {'hash': blocks[2].hash_hex}), + (EventType.VERTEX_METADATA_CHANGED, {'hash': blocks[3].hash_hex}), + (EventType.NEW_VERTEX_ACCEPTED, {'hash': blocks[3].hash_hex}), + (EventType.VERTEX_METADATA_CHANGED, {'hash': blocks[4].hash_hex}), + (EventType.NEW_VERTEX_ACCEPTED, {'hash': blocks[4].hash_hex}), + (EventType.VERTEX_METADATA_CHANGED, {'hash': blocks[5].hash_hex}), + (EventType.NEW_VERTEX_ACCEPTED, {'hash': blocks[5].hash_hex}), + (EventType.VERTEX_METADATA_CHANGED, {'hash': blocks[6].hash_hex}), + (EventType.NEW_VERTEX_ACCEPTED, {'hash': blocks[6].hash_hex}), + (EventType.VERTEX_METADATA_CHANGED, {'hash': blocks[7].hash_hex}), + (EventType.NEW_VERTEX_ACCEPTED, {'hash': blocks[7].hash_hex}), + (EventType.VERTEX_METADATA_CHANGED, {'hash': blocks[8].hash_hex}), + (EventType.NEW_VERTEX_ACCEPTED, {'hash': blocks[8].hash_hex}), + (EventType.VERTEX_METADATA_CHANGED, {'hash': blocks[9].hash_hex}), + (EventType.NEW_VERTEX_ACCEPTED, {'hash': blocks[9].hash_hex}), + (EventType.REORG_STARTED, {'reorg_size': 2, 'previous_best_block': blocks[9].hash_hex, + 'new_best_block': b0.hash_hex}), + ], + # XXX: for some reason the metadata update order of these events isn't always the same + unsorted([ + (EventType.VERTEX_METADATA_CHANGED, {'hash': blocks[8].hash_hex}), + (EventType.VERTEX_METADATA_CHANGED, {'hash': blocks[9].hash_hex}), + (EventType.VERTEX_METADATA_CHANGED, {'hash': b0.hash_hex}), + ]), + # XXX: these events must always have this order + [ + (EventType.REORG_FINISHED, {}), + (EventType.NEW_VERTEX_ACCEPTED, {'hash': b0.hash_hex}), + ], + ] + + for actual_events, expected_events in zip_chunkify(actual_events, expected_events_grouped): + if isinstance(expected_events, unsorted): + actual_events.sort(key=lambda i: i.data.hash) + expected_events.sort(key=lambda i: i[1].get('hash', '')) + + for actual_event, expected_event in zip(actual_events, expected_events): + expected_event_type, expected_partial_data = 
expected_event + + self.assertEqual(EventType(actual_event.type), expected_event_type) + + for expected_data_key, expected_data_value in expected_partial_data.items(): + self.assertEqual(actual_event.data.dict()[expected_data_key], expected_data_value) + + +class SyncV1EventReorgTest(unittest.SyncV1Params, BaseEventReorgTest): + __test__ = True + + +class SyncV2EventReorgTest(unittest.SyncV1Params, BaseEventReorgTest): + __test__ = True + + +# sync-bridge should behave like sync-v2 +class SyncBridgeEventReorgTest(unittest.SyncBridgeParams, SyncV2EventReorgTest): + pass diff --git a/tests/event/test_event_storage.py b/tests/event/test_event_storage.py index aa99b9276..f2441f3a5 100644 --- a/tests/event/test_event_storage.py +++ b/tests/event/test_event_storage.py @@ -23,10 +23,15 @@ def test_save_event_and_retrieve(self): assert event_retrieved == event - def test_get_key_nonpositive(self): - with self.assertRaises(ValueError): + def test_get_negative_key(self): + with self.assertRaises(ValueError) as cm: self.event_storage.get_event(-1) + self.assertEqual( + 'event.id \'-1\' must be non-negative', + str(cm.exception) + ) + def test_get_nonexistent_event(self): assert self.event_storage.get_event(0) is None assert self.event_storage.get_event(9999) is None @@ -41,15 +46,61 @@ def test_save_events_and_retrieve_the_last(self): assert event_retrieved.id == last_event.id def test_save_non_sequential(self): - last_event = None for i in range(10): - last_event = self.event_mocker.generate_mocked_event(i) - self.event_storage.save_event(last_event) + event = self.event_mocker.generate_mocked_event(i) + self.event_storage.save_event(event) + + non_sequential_event = self.event_mocker.generate_mocked_event(100) - non_sequential_event = self.event_mocker.generate_mocked_event(11) - with self.assertRaises(ValueError): + with self.assertRaises(ValueError) as cm: self.event_storage.save_event(non_sequential_event) + self.assertEqual( + 'invalid event.id, ids must be sequential 
and leave no gaps', + str(cm.exception) + ) + + def test_iter_from_event_empty(self): + self._test_iter_from_event(0) + + def test_iter_from_event_single(self): + self._test_iter_from_event(1) + + def test_iter_from_event_multiple(self): + self._test_iter_from_event(20) + + def _test_iter_from_event(self, n_events): + expected_events = [] + for i in range(n_events): + event = self.event_mocker.generate_mocked_event(i) + expected_events.append(event) + self.event_storage.save_event(event) + + actual_events = list(self.event_storage.iter_from_event(0)) + + self.assertEqual(expected_events, actual_events) + + def test_iter_from_event_negative_key(self): + with self.assertRaises(ValueError) as cm: + events = self.event_storage.iter_from_event(-10) + list(events) + + self.assertEqual( + 'event.id \'-10\' must be non-negative', + str(cm.exception) + ) + + def test_save_events_and_retrieve_last_group_id(self): + expected_group_id = 4 + for i in range(10): + group_id = i if i <= expected_group_id else None + event = self.event_mocker.generate_mocked_event(i, group_id) + self.event_storage.save_event(event) + + actual_group_id = self.event_storage.get_last_group_id() + + assert expected_group_id == actual_group_id + @pytest.mark.skipif(not HAS_ROCKSDB, reason='requires python-rocksdb') class EventStorageRocksDBTest(EventStorageBaseTest): diff --git a/tests/event/websocket/__init__.py b/tests/event/websocket/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/event/websocket/test_factory.py b/tests/event/websocket/test_factory.py new file mode 100644 index 000000000..d81509f27 --- /dev/null +++ b/tests/event/websocket/test_factory.py @@ -0,0 +1,147 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from unittest.mock import Mock, call + +import pytest + +from hathor.event.storage import EventMemoryStorage +from hathor.event.websocket.factory import EventWebsocketFactory +from hathor.event.websocket.protocol import EventWebsocketProtocol +from hathor.event.websocket.response import EventResponse, InvalidRequestType +from hathor.simulator.clock import HeapClock +from tests.utils import EventMocker + + +def test_started_register(): + factory = _get_factory() + connection = Mock(spec_set=EventWebsocketProtocol) + connection.send_invalid_request_response = Mock() + + factory.start() + factory.register(connection) + + connection.send_invalid_request_response.assert_not_called() + + +def test_non_started_register(): + factory = _get_factory() + connection = Mock(spec_set=EventWebsocketProtocol) + connection.send_invalid_request_response = Mock() + + factory.register(connection) + + connection.send_invalid_request_response.assert_called_once_with(InvalidRequestType.EVENT_WS_NOT_RUNNING) + + +def test_stopped_register(): + factory = _get_factory() + connection = Mock(spec_set=EventWebsocketProtocol) + connection.send_invalid_request_response = Mock() + + factory.start() + factory.stop() + factory.register(connection) + + connection.send_invalid_request_response.assert_called_once_with(InvalidRequestType.EVENT_WS_NOT_RUNNING) + + +@pytest.mark.parametrize('can_receive_event', [False, True]) +def test_broadcast_event(can_receive_event: bool) -> None: + n_starting_events = 10 + factory = _get_factory(n_starting_events) + event = 
EventMocker.create_event(n_starting_events - 1) + connection = Mock(spec_set=EventWebsocketProtocol) + connection.can_receive_event = Mock(return_value=can_receive_event) + connection.send_event_response = Mock() + + factory.start() + factory.register(connection) + factory.broadcast_event(event) + + if not can_receive_event: + return connection.send_event_response.assert_not_called() + + response = EventResponse(event=event, latest_event_id=n_starting_events - 1) + connection.send_event_response.assert_called_once_with(response) + + +def test_broadcast_multiple_events_multiple_connections(): + factory = _get_factory(10) + connection1 = Mock(spec_set=EventWebsocketProtocol) + connection1.can_receive_event = Mock(return_value=True) + connection1.send_event_response = Mock() + connection2 = Mock(spec_set=EventWebsocketProtocol) + connection2.can_receive_event = Mock(return_value=True) + connection2.send_event_response = Mock() + + factory.start() + factory.register(connection1) + factory.register(connection2) + + for event_id in range(10): + event = EventMocker.create_event(event_id) + factory.broadcast_event(event) + + assert connection1.send_event_response.call_count == 10 + assert connection2.send_event_response.call_count == 10 + + +@pytest.mark.parametrize( + ['next_expected_event_id', 'can_receive_event'], + [ + (0, False), + (0, True), + (3, True), + (10, True) + ] +) +def test_send_next_event_to_connection(next_expected_event_id: int, can_receive_event: bool) -> None: + n_starting_events = 10 + clock = HeapClock() + factory = _get_factory(n_starting_events, clock) + connection = Mock(spec_set=EventWebsocketProtocol) + connection.send_event_response = Mock() + connection.can_receive_event = Mock(return_value=can_receive_event) + connection.next_expected_event_id = Mock( + side_effect=lambda: next_expected_event_id + connection.send_event_response.call_count + ) + + factory.start() + factory.register(connection) + 
factory.send_next_event_to_connection(connection) + + clock.advance(0) + + if not can_receive_event or next_expected_event_id > n_starting_events - 1: + return connection.send_event_response.assert_not_called() + + calls = [] + for _id in range(next_expected_event_id, n_starting_events): + event = EventMocker.create_event(_id) + response = EventResponse(event=event, latest_event_id=n_starting_events - 1) + calls.append(call(response)) + + assert connection.send_event_response.call_count == n_starting_events - next_expected_event_id + connection.send_event_response.assert_has_calls(calls) + + +def _get_factory(n_starting_events: int = 0, clock: HeapClock = HeapClock()) -> EventWebsocketFactory: + event_storage = EventMemoryStorage() + + for event_id in range(n_starting_events): + event = EventMocker.create_event(event_id) + event_storage.save_event(event) + + return EventWebsocketFactory(clock, event_storage) diff --git a/tests/event/websocket/test_protocol.py b/tests/event/websocket/test_protocol.py new file mode 100644 index 000000000..0ef5ded16 --- /dev/null +++ b/tests/event/websocket/test_protocol.py @@ -0,0 +1,349 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from typing import Optional +from unittest.mock import ANY, Mock + +import pytest +from autobahn.websocket import ConnectionRequest + +from hathor.event.model.base_event import BaseEvent +from hathor.event.model.event_type import EventType +from hathor.event.websocket import EventWebsocketFactory +from hathor.event.websocket.protocol import EventWebsocketProtocol +from hathor.event.websocket.response import EventResponse, InvalidRequestType +from tests.utils import EventMocker + + +@pytest.fixture +def factory(): + return Mock(spec_set=EventWebsocketFactory) + + +def test_init(): + protocol = EventWebsocketProtocol() + + assert protocol.client_peer is None + assert protocol._last_sent_event_id is None + assert protocol._ack_event_id is None + assert protocol._window_size == 0 + assert not protocol._stream_is_active + + +def test_next_expected_event_id(): + protocol = EventWebsocketProtocol() + + assert protocol.next_expected_event_id() == 0 + + protocol._last_sent_event_id = 5 + + assert protocol.next_expected_event_id() == 6 + + +def test_on_connect(): + protocol = EventWebsocketProtocol() + request = Mock(spec_set=ConnectionRequest) + request.peer = 'some_peer' + + protocol.onConnect(request) + + assert protocol.client_peer == 'some_peer' + + +def test_on_open(factory): + protocol = EventWebsocketProtocol() + protocol.factory = factory + + protocol.onOpen() + + factory.register.assert_called_once_with(protocol) + + +def test_on_close(factory): + protocol = EventWebsocketProtocol() + protocol.factory = factory + + protocol.onClose(True, 1, 'reason') + + factory.unregister.assert_called_once_with(protocol) + + +def test_send_event_response(): + protocol = EventWebsocketProtocol() + protocol.sendMessage = Mock() + response = EventResponse( + event=BaseEvent( + peer_id='some_peer_id', + id=10, + timestamp=123, + type=EventType.VERTEX_METADATA_CHANGED, + data=EventMocker.tx_data + ), + latest_event_id=10 + ) + + protocol.send_event_response(response) + + 
expected_payload = b'{"type":"EVENT","event":{"peer_id":"some_peer_id","id":10,"timestamp":123.0,' \ + b'"type":"VERTEX_METADATA_CHANGED","data":{"hash":"abc","nonce":123,"timestamp":456,' \ + b'"version":1,"weight":10.0,"inputs":[],"outputs":[],"parents":[],"tokens":[],' \ + b'"token_name":null,"token_symbol":null,"metadata":{"hash":"abc","spent_outputs":[],' \ + b'"conflict_with":[],"voided_by":[],"received_by":[],"children":[],"twins":[],' \ + b'"accumulated_weight":10.0,"score":20.0,"first_block":null,"height":100,' \ + b'"validation":"validation"}},"group_id":null},"latest_event_id":10}' + + protocol.sendMessage.assert_called_once_with(expected_payload) + + +@pytest.mark.parametrize('_type', [InvalidRequestType.VALIDATION_ERROR, InvalidRequestType.STREAM_IS_INACTIVE]) +@pytest.mark.parametrize('invalid_payload', [None, b'some_payload']) +@pytest.mark.parametrize('error_message', [None, 'some error']) +def test_send_invalid_request_response(_type, invalid_payload, error_message): + protocol = EventWebsocketProtocol() + protocol.sendMessage = Mock() + + protocol.send_invalid_request_response(_type, invalid_payload, error_message) + + invalid_request = "null" if invalid_payload is None else f'"{invalid_payload.decode("utf8")}"' + error_message = "null" if error_message is None else f'"{error_message}"' + expected_payload = f'{{"type":"{_type.value}","invalid_request":{invalid_request},' \ + f'"error_message":{error_message}}}' + + protocol.sendMessage.assert_called_once_with(expected_payload.encode('utf8')) + + +@pytest.mark.parametrize( + [ + 'last_sent_event_id', + 'ack_event_id', + 'window_size', + 'stream_is_active', + 'event_id', + 'expected_result', + ], + [ + (None, None, 0, False, 0, False), + (None, None, 0, True, 0, False), + (None, None, 1, True, 0, True), + (0, None, 1, False, 1, False), + (0, None, 1, True, 1, False), + (0, 0, 1, False, 1, False), + (0, 0, 1, True, 1, True), + (1, 0, 1, True, 2, False), + (1, 0, 2, False, 2, False), + (1, 0, 2, True, 
2, True), + (2, 2, 3, True, 3, True), + (3, 2, 3, True, 4, True), + (4, 2, 3, False, 5, False), + (4, 2, 3, True, 5, True), + (4, 2, 3, True, 4, False), + (5, 2, 3, True, 6, False), + ] +) +def test_can_receive_event( + last_sent_event_id: Optional[int], + ack_event_id: Optional[int], + window_size: int, + stream_is_active: bool, + event_id: int, + expected_result: bool +) -> None: + protocol = EventWebsocketProtocol() + protocol._last_sent_event_id = last_sent_event_id + protocol._ack_event_id = ack_event_id + protocol._window_size = window_size + protocol._stream_is_active = stream_is_active + + result = protocol.can_receive_event(event_id) + + assert result == expected_result + + +def test_on_valid_stop_message(): + protocol = EventWebsocketProtocol() + protocol._stream_is_active = True + + protocol.onMessage(b'{"type": "STOP_STREAM"}', False) + + assert not protocol._stream_is_active + + +def test_stop_message_on_inactive(): + protocol = EventWebsocketProtocol() + protocol.sendMessage = Mock() + protocol._stream_is_active = False + payload = b'{"type": "STOP_STREAM"}' + + protocol.onMessage(payload, False) + + response = b'{"type":"STREAM_IS_INACTIVE","invalid_request":"{\\"type\\": \\"STOP_STREAM\\"}",' \ + b'"error_message":null}' + protocol.sendMessage.assert_called_once_with(response) + assert not protocol._stream_is_active + + +@pytest.mark.parametrize( + ['ack_event_id', 'window_size', 'last_sent_event_id'], + [ + (0, 0, 0), + (0, 1, 10), + (0, 10, 1), + (1, 0, 1000), + (10, 0, 10), + ] +) +def test_on_valid_ack_message(ack_event_id, window_size, last_sent_event_id): + protocol = EventWebsocketProtocol() + protocol._last_sent_event_id = last_sent_event_id + protocol.factory = Mock() + protocol.factory.send_next_event_to_connection = Mock() + protocol._stream_is_active = True + payload = f'{{"type": "ACK", "ack_event_id": {ack_event_id}, "window_size": {window_size}}}'.encode('utf8') + + protocol.onMessage(payload, False) + + assert protocol._ack_event_id 
== ack_event_id + assert protocol._window_size == window_size + protocol.factory.send_next_event_to_connection.assert_called_once() + + +@pytest.mark.parametrize( + ['ack_event_id', 'window_size', 'last_sent_event_id'], + [ + (0, 0, 0), + (0, 1, 10), + (0, 10, 1), + (1, 0, 1000), + (10, 0, 10), + ] +) +def test_on_valid_start_message(ack_event_id, window_size, last_sent_event_id): + protocol = EventWebsocketProtocol() + protocol._last_sent_event_id = last_sent_event_id + protocol.factory = Mock() + protocol.factory.send_next_event_to_connection = Mock() + payload = f'{{"type": "START_STREAM", "last_ack_event_id": {ack_event_id}, "window_size": {window_size}}}' + + protocol.onMessage(payload.encode('utf8'), False) + + assert protocol._ack_event_id == ack_event_id + assert protocol._window_size == window_size + assert protocol._last_sent_event_id == ack_event_id + assert protocol._stream_is_active + protocol.factory.send_next_event_to_connection.assert_called_once() + + +def test_ack_message_on_inactive(): + protocol = EventWebsocketProtocol() + protocol.sendMessage = Mock() + protocol._stream_is_active = False + payload = b'{"type": "ACK", "ack_event_id": 10, "window_size": 10}' + + protocol.onMessage(payload, False) + + response = b'{"type":"STREAM_IS_INACTIVE","invalid_request":"{\\"type\\": \\"ACK\\", \\"ack_event_id\\": 10, ' \ + b'\\"window_size\\": 10}","error_message":null}' + protocol.sendMessage.assert_called_once_with(response) + + +def test_start_message_on_active(): + protocol = EventWebsocketProtocol() + protocol.sendMessage = Mock() + protocol._stream_is_active = True + payload = b'{"type": "START_STREAM", "last_ack_event_id": 10, "window_size": 10}' + + protocol.onMessage(payload, False) + + response = b'{"type":"STREAM_IS_ACTIVE","invalid_request":"{\\"type\\": \\"START_STREAM\\", ' \ + b'\\"last_ack_event_id\\": 10, \\"window_size\\": 10}","error_message":null}' + protocol.sendMessage.assert_called_once_with(response) + + +@pytest.mark.parametrize( 
+ ['_ack_event_id', 'last_sent_event_id', 'ack_event_id', '_type'], + [ + (1, None, 0, InvalidRequestType.ACK_TOO_SMALL), + (1, 1, 0, InvalidRequestType.ACK_TOO_SMALL), + (10, None, 5, InvalidRequestType.ACK_TOO_SMALL), + (10, 1, 5, InvalidRequestType.ACK_TOO_SMALL), + (0, None, 1, InvalidRequestType.ACK_TOO_LARGE), + (0, 0, 1, InvalidRequestType.ACK_TOO_LARGE), + (5, None, 10, InvalidRequestType.ACK_TOO_LARGE), + (5, 1, 10, InvalidRequestType.ACK_TOO_LARGE), + ] +) +def test_on_invalid_ack_message(_ack_event_id, last_sent_event_id, ack_event_id, _type): + protocol = EventWebsocketProtocol() + protocol._ack_event_id = _ack_event_id + protocol._last_sent_event_id = last_sent_event_id + protocol.send_invalid_request_response = Mock() + protocol._stream_is_active = True + payload = f'{{"type": "ACK", "ack_event_id": {ack_event_id}, "window_size": 0}}'.encode('utf8') + + protocol.onMessage(payload, False) + + protocol.send_invalid_request_response.assert_called_once_with(_type, payload) + + +@pytest.mark.parametrize( + ['_ack_event_id', 'last_sent_event_id', 'ack_event_id', '_type'], + [ + (0, None, None, InvalidRequestType.ACK_TOO_SMALL), + (0, 1, None, InvalidRequestType.ACK_TOO_SMALL), + (1, None, 0, InvalidRequestType.ACK_TOO_SMALL), + (1, 1, 0, InvalidRequestType.ACK_TOO_SMALL), + (10, None, 5, InvalidRequestType.ACK_TOO_SMALL), + (10, 1, 5, InvalidRequestType.ACK_TOO_SMALL), + (None, None, 0, InvalidRequestType.ACK_TOO_LARGE), + (1, 0, 1, InvalidRequestType.ACK_TOO_LARGE), + (0, None, 1, InvalidRequestType.ACK_TOO_LARGE), + (0, 0, 1, InvalidRequestType.ACK_TOO_LARGE), + (5, None, 10, InvalidRequestType.ACK_TOO_LARGE), + (5, 1, 10, InvalidRequestType.ACK_TOO_LARGE), + ] +) +def test_on_invalid_start_message(_ack_event_id, last_sent_event_id, ack_event_id, _type): + protocol = EventWebsocketProtocol() + protocol._ack_event_id = _ack_event_id + protocol._last_sent_event_id = last_sent_event_id + protocol.send_invalid_request_response = Mock() + ack_event_id = 'null' 
if ack_event_id is None else ack_event_id + payload = f'{{"type": "START_STREAM", "last_ack_event_id": {ack_event_id}, "window_size": 0}}'.encode('utf8') + + protocol.onMessage(payload, False) + + protocol.send_invalid_request_response.assert_called_once_with(_type, payload) + + +@pytest.mark.parametrize( + 'payload', + [ + b'{"type": "FAKE_TYPE"}', + b'{"type": "STOP_STREAM", "fake_prop": 123}', + b'{"type": "START_STREAM", "last_ack_event_id": "wrong value", "window_size": 10}', + b'{"type": "START_STREAM", "last_ack_event_id": 0, "window_size": -10}', + b'{"type": "START_STREAM", "last_ack_event_id": -10, "window_size": 0}', + b'{"type": "ACK", "ack_event_id": 0, "window_size": "wrong value"}', + b'{"type": "ACK", "ack_event_id": 0, "window_size": -10}', + b'{"type": "ACK", "ack_event_id": -10, "window_size": 0}', + ] +) +def test_validation_error_on_message(payload): + protocol = EventWebsocketProtocol() + protocol.send_invalid_request_response = Mock() + protocol._stream_is_active = False + + protocol.onMessage(payload, False) + + protocol.send_invalid_request_response.assert_called_once_with(InvalidRequestType.VALIDATION_ERROR, payload, ANY) diff --git a/tests/others/test_builder.py b/tests/others/test_builder.py index a1ab2a1b5..b17b7c8c4 100644 --- a/tests/others/test_builder.py +++ b/tests/others/test_builder.py @@ -1,141 +1,22 @@ -from typing import List - -import pytest - -from hathor.builder import CliBuilder -from hathor.exception import BuilderError -from hathor.indexes import MemoryIndexesManager, RocksDBIndexesManager -from hathor.manager import HathorManager -from hathor.p2p.sync_version import SyncVersion -from hathor.transaction.storage import TransactionCacheStorage, TransactionMemoryStorage, TransactionRocksDBStorage -from hathor.wallet import HDWallet, Wallet from tests import unittest -from tests.utils import HAS_ROCKSDB +from tests.unittest import TestBuilder class BuilderTestCase(unittest.TestCase): def setUp(self): super().setUp() - 
self.reactor = self.clock + self.builder = TestBuilder() + self.builder.use_memory() - from hathor.cli.run_node import RunNode - self.parser = RunNode.create_parser() - self.builder = CliBuilder() - - def _build_with_error(self, args: List[str], err_msg: str) -> None: - args = self.parser.parse_args(args) - with self.assertRaises(BuilderError) as cm: - self.builder.create_manager(self.reactor, args) - self.builder.register_resources(args, dry_run=True) - self.assertEqual(err_msg, str(cm.exception)) - - def _build(self, args: List[str]) -> HathorManager: - args = self.parser.parse_args(args) - manager = self.builder.create_manager(self.reactor, args) - self.assertIsNotNone(manager) - self.builder.register_resources(args, dry_run=True) - return manager - - def test_empty(self): - self._build_with_error([], '--data is expected') - - @pytest.mark.skipif(not HAS_ROCKSDB, reason='requires python-rocksdb') - def test_all_default(self): - data_dir = self.mkdtemp() - manager = self._build(['--data', data_dir]) - self.assertIsInstance(manager.tx_storage, TransactionRocksDBStorage) - self.assertIsInstance(manager.tx_storage.indexes, RocksDBIndexesManager) - self.assertIsNone(manager.wallet) - self.assertEqual('unittests', manager.network) - self.assertIn(SyncVersion.V1, manager.connections._sync_factories) - self.assertNotIn(SyncVersion.V2, manager.connections._sync_factories) - self.assertFalse(self.builder._build_prometheus) - self.assertFalse(self.builder._build_status) - - @pytest.mark.skipif(not HAS_ROCKSDB, reason='requires python-rocksdb') - def test_cache_storage(self): - data_dir = self.mkdtemp() - manager = self._build(['--cache', '--data', data_dir]) - self.assertIsInstance(manager.tx_storage, TransactionCacheStorage) - self.assertIsInstance(manager.tx_storage.store, TransactionRocksDBStorage) - self.assertIsInstance(manager.tx_storage.indexes, RocksDBIndexesManager) - self.assertIsNone(manager.tx_storage.store.indexes) - - @pytest.mark.skipif(not HAS_ROCKSDB, 
reason='requires python-rocksdb') - def test_default_storage_memory_indexes(self): - data_dir = self.mkdtemp() - manager = self._build(['--memory-indexes', '--data', data_dir]) - self.assertIsInstance(manager.tx_storage, TransactionRocksDBStorage) - self.assertIsInstance(manager.tx_storage.indexes, MemoryIndexesManager) - - @pytest.mark.skipif(not HAS_ROCKSDB, reason='requires python-rocksdb') - def test_default_storage_with_rocksdb_indexes(self): - data_dir = self.mkdtemp() - manager = self._build(['--x-rocksdb-indexes', '--data', data_dir]) - self.assertIsInstance(manager.tx_storage, TransactionRocksDBStorage) - self.assertIsInstance(manager.tx_storage.indexes, RocksDBIndexesManager) - - @pytest.mark.skipif(not HAS_ROCKSDB, reason='requires python-rocksdb') - def test_rocksdb_storage(self): - data_dir = self.mkdtemp() - manager = self._build(['--rocksdb-storage', '--data', data_dir]) - self.assertIsInstance(manager.tx_storage, TransactionRocksDBStorage) - self.assertIsInstance(manager.tx_storage.indexes, RocksDBIndexesManager) - - def test_memory_storage(self): - manager = self._build(['--memory-storage']) - self.assertIsInstance(manager.tx_storage, TransactionMemoryStorage) - self.assertIsInstance(manager.tx_storage.indexes, MemoryIndexesManager) - - def test_memory_storage_with_rocksdb_indexes(self): - self._build_with_error(['--memory-storage', '--x-rocksdb-indexes'], 'RocksDB indexes require RocksDB data') - - def test_sync_bridge(self): - manager = self._build(['--memory-storage', '--x-sync-bridge']) - self.assertIn(SyncVersion.V1, manager.connections._sync_factories) - self.assertIn(SyncVersion.V2, manager.connections._sync_factories) - - def test_sync_v2_only(self): - manager = self._build(['--memory-storage', '--x-sync-v2-only']) - self.assertNotIn(SyncVersion.V1, manager.connections._sync_factories) - self.assertIn(SyncVersion.V2, manager.connections._sync_factories) - - def test_keypair_wallet(self): - manager = self._build(['--memory-storage', 
'--wallet', 'keypair']) - self.assertIsInstance(manager.wallet, Wallet) - - def test_hd_wallet(self): - manager = self._build(['--memory-storage', '--wallet', 'hd']) - self.assertIsInstance(manager.wallet, HDWallet) - - def test_invalid_wallet(self): - self._build_with_error(['--memory-storage', '--wallet', 'invalid-wallet'], 'Invalid type of wallet') - - def test_status(self): - self._build([ - '--memory-storage', - '--status', '8080', - '--utxo-index', - '--enable-debug-api', - '--enable-crash-api' - ]) - self.assertTrue(self.builder._build_status) - self.clean_pending(required_to_quiesce=False) + def test_multiple_calls_to_build(self): + self.builder.build() - def test_prometheus_no_data(self): - args = ['--memory-storage', '--prometheus'] - self._build_with_error(args, 'To run prometheus exporter you must have a data path') + with self.assertRaises(ValueError): + self.builder.build() - @pytest.mark.skipif(not HAS_ROCKSDB, reason='requires python-rocksdb') - def test_prometheus(self): - data_dir = self.mkdtemp() - self._build(['--prometheus', '--data', data_dir]) - self.assertTrue(self.builder._build_prometheus) - self.clean_pending(required_to_quiesce=False) + def test_check_if_can_modify(self): + self.builder.build() - @pytest.mark.skipif(not HAS_ROCKSDB, reason='requires python-rocksdb') - def test_memory_and_rocksdb_indexes(self): - data_dir = self.mkdtemp() - args = ['--memory-indexes', '--x-rocksdb-indexes', '--data', data_dir] - self._build_with_error(args, 'You cannot use --memory-indexes and --x-rocksdb-indexes.') + with self.assertRaises(ValueError): + self.builder.set_reactor(self.reactor) diff --git a/tests/others/test_cli_builder.py b/tests/others/test_cli_builder.py new file mode 100644 index 000000000..2513a0487 --- /dev/null +++ b/tests/others/test_cli_builder.py @@ -0,0 +1,184 @@ +from typing import List + +import pytest + +from hathor.builder import CliBuilder +from hathor.event import EventManager +from hathor.event.storage import 
EventMemoryStorage, EventRocksDBStorage +from hathor.event.websocket import EventWebsocketFactory +from hathor.exception import BuilderError +from hathor.indexes import MemoryIndexesManager, RocksDBIndexesManager +from hathor.manager import HathorManager +from hathor.p2p.sync_version import SyncVersion +from hathor.transaction.storage import TransactionCacheStorage, TransactionMemoryStorage, TransactionRocksDBStorage +from hathor.wallet import HDWallet, Wallet +from tests import unittest +from tests.utils import HAS_ROCKSDB + + +class BuilderTestCase(unittest.TestCase): + def setUp(self): + super().setUp() + + self.reactor = self.clock + + from hathor.cli.run_node import RunNode + self.parser = RunNode.create_parser() + self.builder = CliBuilder() + + def _build_with_error(self, args: List[str], err_msg: str) -> None: + args = self.parser.parse_args(args) + with self.assertRaises(BuilderError) as cm: + self.builder.create_manager(self.reactor, args) + self.builder.register_resources(args, dry_run=True) + self.assertEqual(err_msg, str(cm.exception)) + + def _build(self, args: List[str]) -> HathorManager: + args = self.parser.parse_args(args) + manager = self.builder.create_manager(self.reactor, args) + self.assertIsNotNone(manager) + self.builder.register_resources(args, dry_run=True) + return manager + + def test_empty(self): + self._build_with_error([], '--data is expected') + + @pytest.mark.skipif(not HAS_ROCKSDB, reason='requires python-rocksdb') + def test_all_default(self): + data_dir = self.mkdtemp() + manager = self._build(['--data', data_dir]) + self.assertIsInstance(manager.tx_storage, TransactionRocksDBStorage) + self.assertIsInstance(manager.tx_storage.indexes, RocksDBIndexesManager) + self.assertIsNone(manager.wallet) + self.assertEqual('unittests', manager.network) + self.assertNotIn(SyncVersion.V1, manager.connections._sync_factories) + self.assertIn(SyncVersion.V1_1, manager.connections._sync_factories) + self.assertNotIn(SyncVersion.V2, 
manager.connections._sync_factories) + self.assertFalse(self.builder._build_prometheus) + self.assertFalse(self.builder._build_status) + self.assertIsNone(manager._event_manager) + + @pytest.mark.skipif(not HAS_ROCKSDB, reason='requires python-rocksdb') + def test_cache_storage(self): + data_dir = self.mkdtemp() + manager = self._build(['--cache', '--data', data_dir]) + self.assertIsInstance(manager.tx_storage, TransactionCacheStorage) + self.assertIsInstance(manager.tx_storage.store, TransactionRocksDBStorage) + self.assertIsInstance(manager.tx_storage.indexes, RocksDBIndexesManager) + self.assertIsNone(manager.tx_storage.store.indexes) + + @pytest.mark.skipif(not HAS_ROCKSDB, reason='requires python-rocksdb') + def test_default_storage_memory_indexes(self): + data_dir = self.mkdtemp() + manager = self._build(['--memory-indexes', '--data', data_dir]) + self.assertIsInstance(manager.tx_storage, TransactionRocksDBStorage) + self.assertIsInstance(manager.tx_storage.indexes, MemoryIndexesManager) + + @pytest.mark.skipif(not HAS_ROCKSDB, reason='requires python-rocksdb') + def test_default_storage_with_rocksdb_indexes(self): + data_dir = self.mkdtemp() + manager = self._build(['--x-rocksdb-indexes', '--data', data_dir]) + self.assertIsInstance(manager.tx_storage, TransactionRocksDBStorage) + self.assertIsInstance(manager.tx_storage.indexes, RocksDBIndexesManager) + + @pytest.mark.skipif(not HAS_ROCKSDB, reason='requires python-rocksdb') + def test_rocksdb_storage(self): + data_dir = self.mkdtemp() + manager = self._build(['--rocksdb-storage', '--data', data_dir]) + self.assertIsInstance(manager.tx_storage, TransactionRocksDBStorage) + self.assertIsInstance(manager.tx_storage.indexes, RocksDBIndexesManager) + + def test_memory_storage(self): + manager = self._build(['--memory-storage']) + self.assertIsInstance(manager.tx_storage, TransactionMemoryStorage) + self.assertIsInstance(manager.tx_storage.indexes, MemoryIndexesManager) + + def 
test_memory_storage_with_rocksdb_indexes(self): + self._build_with_error(['--memory-storage', '--x-rocksdb-indexes'], 'RocksDB indexes require RocksDB data') + + def test_sync_v1_0_legacy(self): + manager = self._build(['--memory-storage', '--x-enable-legacy-sync-v1_0']) + self.assertIn(SyncVersion.V1, manager.connections._sync_factories) + self.assertIn(SyncVersion.V1_1, manager.connections._sync_factories) + self.assertNotIn(SyncVersion.V2, manager.connections._sync_factories) + + def test_sync_bridge(self): + manager = self._build(['--memory-storage', '--x-sync-bridge']) + self.assertNotIn(SyncVersion.V1, manager.connections._sync_factories) + self.assertIn(SyncVersion.V1_1, manager.connections._sync_factories) + self.assertIn(SyncVersion.V2, manager.connections._sync_factories) + + def test_sync_v2_only(self): + manager = self._build(['--memory-storage', '--x-sync-v2-only']) + self.assertNotIn(SyncVersion.V1_1, manager.connections._sync_factories) + self.assertNotIn(SyncVersion.V1, manager.connections._sync_factories) + self.assertIn(SyncVersion.V2, manager.connections._sync_factories) + + def test_keypair_wallet(self): + manager = self._build(['--memory-storage', '--wallet', 'keypair']) + self.assertIsInstance(manager.wallet, Wallet) + + def test_hd_wallet(self): + manager = self._build(['--memory-storage', '--wallet', 'hd']) + self.assertIsInstance(manager.wallet, HDWallet) + + def test_invalid_wallet(self): + self._build_with_error(['--memory-storage', '--wallet', 'invalid-wallet'], 'Invalid type of wallet') + + def test_status(self): + self._build([ + '--memory-storage', + '--status', '8080', + '--utxo-index', + '--enable-debug-api', + '--enable-crash-api' + ]) + self.assertTrue(self.builder._build_status) + self.clean_pending(required_to_quiesce=False) + + def test_prometheus_no_data(self): + args = ['--memory-storage', '--prometheus'] + self._build_with_error(args, 'To run prometheus exporter you must have a data path') + + @pytest.mark.skipif(not 
HAS_ROCKSDB, reason='requires python-rocksdb') + def test_prometheus(self): + data_dir = self.mkdtemp() + self._build(['--prometheus', '--data', data_dir]) + self.assertTrue(self.builder._build_prometheus) + self.clean_pending(required_to_quiesce=False) + + @pytest.mark.skipif(not HAS_ROCKSDB, reason='requires python-rocksdb') + def test_memory_and_rocksdb_indexes(self): + data_dir = self.mkdtemp() + args = ['--memory-indexes', '--x-rocksdb-indexes', '--data', data_dir] + self._build_with_error(args, 'You cannot use --memory-indexes and --x-rocksdb-indexes.') + + @pytest.mark.skipif(not HAS_ROCKSDB, reason='requires python-rocksdb') + def test_event_queue_with_rocksdb_storage(self): + data_dir = self.mkdtemp() + manager = self._build(['--x-enable-event-queue', '--rocksdb-storage', '--data', data_dir]) + + self.assertIsInstance(manager._event_manager, EventManager) + self.assertIsInstance(manager._event_manager._event_storage, EventRocksDBStorage) + self.assertIsInstance(manager._event_manager._event_ws_factory, EventWebsocketFactory) + self.assertFalse(manager._event_manager.emit_load_events) + + def test_event_queue_with_memory_storage(self): + manager = self._build(['--x-enable-event-queue', '--memory-storage']) + + self.assertIsInstance(manager._event_manager, EventManager) + self.assertIsInstance(manager._event_manager._event_storage, EventMemoryStorage) + self.assertIsInstance(manager._event_manager._event_ws_factory, EventWebsocketFactory) + self.assertFalse(manager._event_manager.emit_load_events) + + def test_event_queue_with_full_verification(self): + args = ['--x-enable-event-queue', '--memory-storage', '--x-full-verification'] + self._build_with_error(args, '--x-full-verification cannot be used with --x-enable-event-queue') + + def test_event_queue_with_emit_load_events(self): + manager = self._build(['--x-enable-event-queue', '--memory-storage', '--x-emit-load-events']) + + self.assertIsInstance(manager._event_manager, EventManager) + 
self.assertIsInstance(manager._event_manager._event_storage, EventMemoryStorage) + self.assertIsInstance(manager._event_manager._event_ws_factory, EventWebsocketFactory) + self.assertTrue(manager._event_manager.emit_load_events) diff --git a/tests/others/test_init_manager.py b/tests/others/test_init_manager.py index 68adfa9a2..bdeb5f4e0 100644 --- a/tests/others/test_init_manager.py +++ b/tests/others/test_init_manager.py @@ -1,10 +1,11 @@ from typing import Iterator from hathor.conf import HathorSettings -from hathor.manager import HathorManager +from hathor.pubsub import PubSubManager from hathor.transaction import BaseTransaction from hathor.transaction.storage import TransactionMemoryStorage from tests import unittest +from tests.unittest import TestBuilder from tests.utils import ( add_blocks_unlock_reward, add_new_block, @@ -24,12 +25,12 @@ def __init__(self, *args, **kwargs): def set_first_tx(self, tx: BaseTransaction) -> None: self._first_tx = tx - def get_all_transactions(self) -> Iterator[BaseTransaction]: + def get_all_transactions(self, *, include_partial: bool = False) -> Iterator[BaseTransaction]: skip_hash = None if self._first_tx: yield self._first_tx skip_hash = self._first_tx.hash - for tx in super().get_all_transactions(): + for tx in super().get_all_transactions(include_partial=include_partial): if tx.hash != skip_hash: yield tx @@ -38,35 +39,49 @@ class SimpleManagerInitializationTestCase(unittest.TestCase): def setUp(self): super().setUp() self.tx_storage = ModifiedTransactionMemoryStorage() + self.pubsub = PubSubManager(self.clock) def test_invalid_arguments(self): # this is a base case, it shouldn't raise any error # (otherwise we might not be testing the correct thing below) - manager = HathorManager(self.clock, tx_storage=self.tx_storage) + builder = TestBuilder() + builder.set_tx_storage(self.tx_storage) + artifacts = builder.build() + manager = artifacts.manager del manager # disabling both sync versions should be invalid with 
self.assertRaises(TypeError): - HathorManager(self.clock, tx_storage=self.tx_storage, enable_sync_v1=False, enable_sync_v2=False) - - # not passing a storage should be invalid - with self.assertRaises(TypeError): - HathorManager(self.clock) + builder = TestBuilder() + builder.set_tx_storage(self.tx_storage) + builder.disable_sync_v1() + builder.disable_sync_v2() + builder.build() def tests_init_with_stratum(self): - manager = HathorManager(self.clock, tx_storage=self.tx_storage, stratum_port=50505) + builder = TestBuilder() + builder.set_tx_storage(self.tx_storage) + builder.enable_stratum_server(50505) + artifacts = builder.build() + manager = artifacts.manager manager.start() manager.stop() del manager def test_double_start(self): - manager = HathorManager(self.clock, tx_storage=self.tx_storage) + builder = TestBuilder() + builder.set_tx_storage(self.tx_storage) + artifacts = builder.build() + manager = artifacts.manager manager.start() with self.assertRaises(Exception): manager.start() def test_wrong_stop(self): - manager = HathorManager(self.clock, tx_storage=self.tx_storage) + builder = TestBuilder() + builder.set_tx_storage(self.tx_storage) + artifacts = builder.build() + manager = artifacts.manager with self.assertRaises(Exception): manager.stop() manager.start() diff --git a/tests/others/test_metrics.py b/tests/others/test_metrics.py index 2f6b39a8d..58cada2f1 100644 --- a/tests/others/test_metrics.py +++ b/tests/others/test_metrics.py @@ -3,14 +3,12 @@ import pytest -from hathor.manager import HathorManager from hathor.p2p.manager import PeerConnectionsMetrics from hathor.p2p.peer_id import PeerId from hathor.p2p.protocol import HathorProtocol -from hathor.pubsub import HathorEvents, PubSubManager +from hathor.pubsub import HathorEvents from hathor.storage import RocksDBStorage from hathor.transaction.storage import TransactionCacheStorage, TransactionMemoryStorage, TransactionRocksDBStorage -from hathor.util import reactor from hathor.wallet import Wallet 
from tests import unittest from tests.utils import HAS_ROCKSDB, add_new_blocks @@ -27,11 +25,10 @@ def test_p2p_network_events(self): the event to set its own fields related to the network peers """ # Preparation - tx_storage = TransactionMemoryStorage() - pubsub = PubSubManager(reactor) - manager = HathorManager(self.clock, tx_storage=tx_storage, pubsub=pubsub) - - manager.metrics.start() + self.use_memory_storage = True + manager = self.create_peer('testnet') + self.assertIsInstance(manager.tx_storage, TransactionMemoryStorage) + pubsub = manager.pubsub # Execution pubsub.publish( @@ -39,6 +36,7 @@ def test_p2p_network_events(self): protocol=Mock(), peers_count=PeerConnectionsMetrics(3, 4, 5, 6) ) + self.run_to_completion() # Assertion self.assertEquals(manager.metrics.connecting_peers, 3) @@ -60,11 +58,9 @@ def test_connections_manager_integration(self): self.tmpdirs.append(tmpdir) wallet = Wallet(directory=tmpdir) wallet.unlock(b'teste') - manager = HathorManager(self.clock, tx_storage=tx_storage, wallet=wallet) + manager = self.create_peer('testnet', tx_storage=tx_storage, wallet=wallet) - manager.metrics.start() - - manager.connections.peer_storage.update({"1": Mock(), "2": Mock(), "3": Mock()}) + manager.connections.peer_storage.update({"1": PeerId(), "2": PeerId(), "3": PeerId()}) manager.connections.connected_peers.update({"1": Mock(), "2": Mock()}) manager.connections.handshaking_peers.update({Mock()}) @@ -91,8 +87,6 @@ def test_tx_storage_data_collection_with_rocksdb_storage_and_no_cache(self): The expected result is that it will successfully collect the RocksDB metrics. 
""" - reactor = self.clock - path = tempfile.mkdtemp() self.tmpdirs.append(path) @@ -101,10 +95,8 @@ def _init_manager(): tx_storage = TransactionRocksDBStorage(rocksdb_storage, with_index=True, use_memory_indexes=True) - - pubsub = PubSubManager(reactor) - wallet = self._create_test_wallet() - return HathorManager(reactor=reactor, tx_storage=tx_storage, pubsub=pubsub, wallet=wallet) + manager = self.create_peer('testnet', tx_storage=tx_storage, start_manager=False) + return manager manager = _init_manager() manager.metrics._collect_data() @@ -132,8 +124,8 @@ def _init_manager(): # We don't know exactly the sizes of each column family, # but we know empirically that they should be higher than these values - self.assertTrue(manager.metrics.rocksdb_cfs_sizes[b'tx'] > 500) - self.assertTrue(manager.metrics.rocksdb_cfs_sizes[b'meta'] > 1000) + self.assertGreater(manager.metrics.rocksdb_cfs_sizes[b'tx'], 500) + self.assertGreater(manager.metrics.rocksdb_cfs_sizes[b'meta'], 1000) @pytest.mark.skipif(not HAS_ROCKSDB, reason='requires python-rocksdb') def test_tx_storage_data_collection_with_rocksdb_storage_and_cache(self): @@ -143,8 +135,6 @@ def test_tx_storage_data_collection_with_rocksdb_storage_and_cache(self): The expected result is that it will successfully collect the RocksDB metrics. 
""" - reactor = self.clock - path = tempfile.mkdtemp() self.tmpdirs.append(path) @@ -153,11 +143,10 @@ def _init_manager(): tx_storage = TransactionRocksDBStorage(rocksdb_storage, with_index=False, use_memory_indexes=True) - tx_storage = TransactionCacheStorage(tx_storage, reactor) + tx_storage = TransactionCacheStorage(tx_storage, self.clock) - pubsub = PubSubManager(reactor) wallet = self._create_test_wallet() - return HathorManager(reactor=reactor, tx_storage=tx_storage, pubsub=pubsub, wallet=wallet) + return self.create_peer('testnet', tx_storage=tx_storage, wallet=wallet, start_manager=False) manager = _init_manager() manager.metrics._collect_data() @@ -197,12 +186,10 @@ def test_tx_storage_data_collection_with_memory_storage(self): The expected result is that nothing is done, because we currently only collect data for RocksDB storage """ - reactor = self.clock tx_storage = TransactionMemoryStorage() # All - pubsub = PubSubManager(reactor) - manager = HathorManager(reactor=reactor, tx_storage=tx_storage, pubsub=pubsub) + manager = self.create_peer('testnet', tx_storage=tx_storage) manager.metrics._collect_data() @@ -213,13 +200,11 @@ def test_peer_connections_data_collection(self): ConnectionsManager """ # Preparation - reactor = self.clock - tx_storage = TransactionMemoryStorage(with_index=False) - pubsub = PubSubManager(reactor) - - manager = HathorManager(reactor=reactor, tx_storage=tx_storage, pubsub=pubsub) + self.use_memory_storage = True + manager = self.create_peer('testnet') + self.assertIsInstance(manager.tx_storage, TransactionMemoryStorage) - my_peer = PeerId() + my_peer = manager.my_peer def build_hathor_protocol(): protocol = HathorProtocol( @@ -271,13 +256,10 @@ def test_cache_data_collection(self): TransactionCacheStorage """ # Preparation - reactor = self.clock base_storage = TransactionMemoryStorage(with_index=False) - tx_storage = TransactionCacheStorage(base_storage, reactor) - - pubsub = PubSubManager(reactor) + tx_storage = 
TransactionCacheStorage(base_storage, self.clock) - manager = HathorManager(reactor=reactor, tx_storage=tx_storage, pubsub=pubsub) + manager = self.create_peer('testnet', tx_storage=tx_storage) tx_storage.stats["hit"] = 10 tx_storage.stats["miss"] = 20 diff --git a/tests/p2p/test_connections.py b/tests/p2p/test_connections.py index 8b6b0fb1b..34730afb0 100644 --- a/tests/p2p/test_connections.py +++ b/tests/p2p/test_connections.py @@ -1,11 +1,7 @@ import sys -import tempfile import pytest -from hathor.manager import HathorManager -from hathor.transaction.storage import TransactionMemoryStorage -from hathor.wallet import Wallet from tests import unittest from tests.utils import run_server @@ -22,12 +18,7 @@ def test_connections(self): process3.terminate() def test_manager_connections(self): - tx_storage = TransactionMemoryStorage() - tmpdir = tempfile.mkdtemp() - self.tmpdirs.append(tmpdir) - wallet = Wallet(directory=tmpdir) - wallet.unlock(b'teste') - manager = HathorManager(self.clock, tx_storage=tx_storage, wallet=wallet) + manager = self.create_peer('testnet', enable_sync_v1=True, enable_sync_v2=False) endpoint = 'tcp://127.0.0.1:8005' manager.connections.connect_to(endpoint, use_ssl=True) diff --git a/tests/p2p/test_sync.py b/tests/p2p/test_sync.py index 9447980df..c2c2724a5 100644 --- a/tests/p2p/test_sync.py +++ b/tests/p2p/test_sync.py @@ -268,7 +268,7 @@ def test_downloader(self): self.assertTrue(isinstance(conn.proto1.state, PeerIdState)) self.assertTrue(isinstance(conn.proto2.state, PeerIdState)) - downloader = conn.proto2.connections._sync_factories[SyncVersion.V1].downloader + downloader = conn.proto2.connections._sync_factories[SyncVersion.V1_1].downloader node_sync1 = NodeSyncTimestamp(conn.proto1, downloader, reactor=conn.proto1.node.reactor) node_sync1.start() @@ -361,7 +361,7 @@ def _downloader_bug_setup(self): # create the peer that will experience the bug self.manager_bug = self.create_peer(self.network) - self.downloader = 
self.manager_bug.connections._sync_factories[SyncVersion.V1].downloader + self.downloader = self.manager_bug.connections._sync_factories[SyncVersion.V1_1].downloader self.downloader.window_size = 1 self.conn1 = FakeConnection(self.manager_bug, self.manager1) self.conn2 = FakeConnection(self.manager_bug, self.manager2) @@ -460,7 +460,7 @@ def test_downloader_retry_reorder(self): def test_downloader_disconnect(self): """ This is related to test_downloader_retry_reorder, but it basically tests the change in behavior instead. - When a peer disconnects it should be immediately remvoed from the tx-detail's connections list. + When a peer disconnects it should be immediately removed from the tx-detail's connections list. """ self._downloader_bug_setup() diff --git a/tests/p2p/test_sync_enabled.py b/tests/p2p/test_sync_enabled.py new file mode 100644 index 000000000..a62f9c041 --- /dev/null +++ b/tests/p2p/test_sync_enabled.py @@ -0,0 +1,75 @@ +from hathor.simulator import FakeConnection +from hathor.simulator.trigger import StopAfterNMinedBlocks +from tests import unittest +from tests.simulation.base import SimulatorTestCase + + +class BaseRandomSimulatorTestCase(SimulatorTestCase): + def test_new_node_disabled(self): + manager1 = self.create_peer() + manager1.allow_mining_without_peers() + + miner1 = self.simulator.create_miner(manager1, hashpower=10e6) + miner1.start() + trigger = StopAfterNMinedBlocks(miner1, quantity=20) + self.simulator.run(3600, trigger=trigger) + + gen_tx1 = self.simulator.create_tx_generator(manager1, rate=3 / 60., hashpower=1e6, ignore_no_funds=True) + gen_tx1.start() + self.simulator.run(3600) + + for _ in range(20): + print() + + manager2 = self.create_peer() + manager2.connections.MAX_ENABLED_SYNC = 0 + conn12 = FakeConnection(manager1, manager2, latency=0.05) + self.simulator.add_connection(conn12) + + self.assertFalse(conn12._proto2.is_sync_enabled()) + v2 = list(manager2.tx_storage.get_all_transactions()) + self.assertEqual(3, len(v2)) + + 
self.simulator.run(3600) + + v1 = list(manager1.tx_storage.get_all_transactions()) + self.assertGreater(len(v1), 3) + + self.assertFalse(conn12._proto2.is_sync_enabled()) + v2 = list(manager2.tx_storage.get_all_transactions()) + self.assertEqual(3, len(v2)) + + def test_sync_rotate(self): + manager1 = self.create_peer() + manager1.connections.MAX_ENABLED_SYNC = 3 + other_managers = [self.create_peer() for _ in range(15)] + + connections = [] + for other in other_managers: + conn = FakeConnection(manager1, other, latency=0.05) + connections.append(conn) + self.simulator.add_connection(conn) + + self.simulator.run(600) + + enabled = set(conn for conn in connections if conn.proto1.is_sync_enabled()) + self.assertTrue(len(enabled), 3) + + manager1.connections._sync_rotate_if_needed(force=True) + enabled2 = set(conn for conn in connections if conn.proto1.is_sync_enabled()) + self.assertTrue(len(enabled2), 3) + # Chance of false positive: 1/comb(20, 3) = 0.0008771929824561404 + self.assertNotEqual(enabled, enabled2) + + +class SyncV1RandomSimulatorTestCase(unittest.SyncV1Params, BaseRandomSimulatorTestCase): + __test__ = True + + +class SyncV2RandomSimulatorTestCase(unittest.SyncV2Params, BaseRandomSimulatorTestCase): + __test__ = True + + +# sync-bridge should behave like sync-v2 +class SyncBridgeRandomSimulatorTestCase(unittest.SyncBridgeParams, SyncV2RandomSimulatorTestCase): + __test__ = True diff --git a/tests/p2p/test_sync_rate_limiter.py b/tests/p2p/test_sync_rate_limiter.py new file mode 100644 index 000000000..df0020841 --- /dev/null +++ b/tests/p2p/test_sync_rate_limiter.py @@ -0,0 +1,54 @@ +from unittest.mock import MagicMock + +from hathor.simulator import FakeConnection +from hathor.simulator.trigger import StopAfterNMinedBlocks +from tests import unittest +from tests.simulation.base import SimulatorTestCase + + +class BaseRandomSimulatorTestCase(SimulatorTestCase): + def test_sync_rate_limiter(self): + manager1 = self.create_peer() + + miner1 = 
self.simulator.create_miner(manager1, hashpower=10e6) + miner1.start() + trigger = StopAfterNMinedBlocks(miner1, quantity=20) + self.simulator.run(3600, trigger=trigger) + + manager2 = self.create_peer() + manager2.connections.MAX_ENABLED_SYNC = 0 + conn12 = FakeConnection(manager1, manager2, latency=0.05) + self.simulator.add_connection(conn12) + self.simulator.run(3600) + + manager2.connections.enable_rate_limiter(8, 2) + + connected_peers2 = list(manager2.connections.connected_peers.values()) + self.assertEqual(1, len(connected_peers2)) + protocol2 = connected_peers2[0] + sync2 = protocol2.state.sync_manager + sync2._send_tips = MagicMock() + + for i in range(100): + sync2.send_tips() + self.assertEqual(sync2._send_tips.call_count, min(i + 1, 8)) + self.assertEqual(sync2._send_tips.call_count, 8) + + sync2.send_tips() + self.assertEqual(sync2._send_tips.call_count, 8) + + self.simulator._clock.advance(2000) + self.assertTrue(sync2._send_tips.call_count, 16) + + +class SyncV1RandomSimulatorTestCase(unittest.SyncV1Params, BaseRandomSimulatorTestCase): + __test__ = True + + +class SyncV2RandomSimulatorTestCase(unittest.SyncV2Params, BaseRandomSimulatorTestCase): + __test__ = True + + +# sync-bridge should behave like sync-v2 +class SyncBridgeRandomSimulatorTestCase(unittest.SyncBridgeParams, SyncV2RandomSimulatorTestCase): + __test__ = True diff --git a/tests/p2p/test_whitelist.py b/tests/p2p/test_whitelist.py new file mode 100644 index 000000000..7f1b28759 --- /dev/null +++ b/tests/p2p/test_whitelist.py @@ -0,0 +1,81 @@ +from unittest.mock import patch + +from hathor.conf import HathorSettings +from hathor.p2p.sync_version import SyncVersion +from hathor.simulator import FakeConnection +from tests import unittest + +settings = HathorSettings() + + +class WhitelistTestCase(unittest.SyncV1Params, unittest.TestCase): + @patch('hathor.p2p.states.peer_id.settings', new=settings._replace(ENABLE_PEER_WHITELIST=True)) + def test_sync_v11_whitelist_no_no(self): + network = 
'testnet' + + manager1 = self.create_peer(network) + self.assertEqual(set(manager1.connections._sync_factories.keys()), {SyncVersion.V1_1}) + + manager2 = self.create_peer(network) + self.assertEqual(set(manager2.connections._sync_factories.keys()), {SyncVersion.V1_1}) + + conn = FakeConnection(manager1, manager2) + self.assertFalse(conn.tr1.disconnecting) + self.assertFalse(conn.tr2.disconnecting) + + # Run the p2p protocol. + for _ in range(100): + conn.run_one_step(debug=True) + self.clock.advance(0.1) + + self.assertTrue(conn.tr1.disconnecting) + self.assertTrue(conn.tr2.disconnecting) + + @patch('hathor.p2p.states.peer_id.settings', new=settings._replace(ENABLE_PEER_WHITELIST=True)) + def test_sync_v11_whitelist_yes_no(self): + network = 'testnet' + + manager1 = self.create_peer(network) + self.assertEqual(set(manager1.connections._sync_factories.keys()), {SyncVersion.V1_1}) + + manager2 = self.create_peer(network) + self.assertEqual(set(manager2.connections._sync_factories.keys()), {SyncVersion.V1_1}) + + manager1.peers_whitelist.append(manager2.my_peer.id) + + conn = FakeConnection(manager1, manager2) + self.assertFalse(conn.tr1.disconnecting) + self.assertFalse(conn.tr2.disconnecting) + + # Run the p2p protocol. 
+ for _ in range(100): + conn.run_one_step(debug=True) + self.clock.advance(0.1) + + self.assertFalse(conn.tr1.disconnecting) + self.assertTrue(conn.tr2.disconnecting) + + @patch('hathor.p2p.states.peer_id.settings', new=settings._replace(ENABLE_PEER_WHITELIST=True)) + def test_sync_v11_whitelist_yes_yes(self): + network = 'testnet' + + manager1 = self.create_peer(network) + self.assertEqual(set(manager1.connections._sync_factories.keys()), {SyncVersion.V1_1}) + + manager2 = self.create_peer(network) + self.assertEqual(set(manager2.connections._sync_factories.keys()), {SyncVersion.V1_1}) + + manager1.peers_whitelist.append(manager2.my_peer.id) + manager2.peers_whitelist.append(manager1.my_peer.id) + + conn = FakeConnection(manager1, manager2) + self.assertFalse(conn.tr1.disconnecting) + self.assertFalse(conn.tr2.disconnecting) + + # Run the p2p protocol. + for _ in range(100): + conn.run_one_step(debug=True) + self.clock.advance(0.1) + + self.assertFalse(conn.tr1.disconnecting) + self.assertFalse(conn.tr2.disconnecting) diff --git a/tests/resources/base_resource.py b/tests/resources/base_resource.py index 5bda9ac5a..b7aebc16b 100644 --- a/tests/resources/base_resource.py +++ b/tests/resources/base_resource.py @@ -1,61 +1,25 @@ -import tempfile - from twisted.internet.defer import succeed from twisted.web import server from twisted.web.test.requesthelper import DummyRequest from hathor.daa import TestMode, _set_test_mode -from hathor.manager import HathorManager -from hathor.p2p.peer_id import PeerId -from hathor.storage.rocksdb_storage import RocksDBStorage -from hathor.util import get_environment_info, json_dumpb, json_loadb +from hathor.util import json_dumpb, json_loadb from tests import unittest class _BaseResourceTest: class _ResourceTest(unittest.TestCase): - def _manager_kwargs(self): - peer_id = PeerId() - network = 'testnet' - wallet = self._create_test_wallet() - tx_storage = getattr(self, 'tx_storage', None) - if tx_storage is None: - if 
self.use_memory_storage: - from hathor.transaction.storage.memory_storage import TransactionMemoryStorage - tx_storage = TransactionMemoryStorage() - else: - from hathor.transaction.storage.rocksdb_storage import TransactionRocksDBStorage - directory = tempfile.mkdtemp() - self.tmpdirs.append(directory) - rocksdb_storage = RocksDBStorage(path=directory) - tx_storage = TransactionRocksDBStorage(rocksdb_storage) - assert ( - hasattr(self, '_enable_sync_v1') and - hasattr(self, '_enable_sync_v2') and - (self._enable_sync_v1 or self._enable_sync_v2) - ), ( - 'Please set both `_enable_sync_v1` and `_enable_sync_v2` on the class. ' - 'Also they can\'t both be False. ' - 'This is by design so we don\'t forget to test for multiple sync versions.' - ) - return dict( - peer_id=peer_id, - network=network, - wallet=wallet, - tx_storage=tx_storage, - wallet_index=True, - enable_sync_v1=self._enable_sync_v1, - enable_sync_v2=self._enable_sync_v2, - environment_info=get_environment_info("", peer_id.id) - ) - - def setUp(self): + def setUp(self, *, utxo_index: bool = False, unlock_wallet: bool = True) -> None: super().setUp() self.reactor = self.clock - self.manager = HathorManager(self.reactor, **self._manager_kwargs()) + self.manager = self.create_peer( + 'testnet', + wallet_index=True, + utxo_index=utxo_index, + unlock_wallet=unlock_wallet + ) self.manager.allow_mining_without_peers() _set_test_mode(TestMode.TEST_ALL_WEIGHT) - self.manager.start() def tearDown(self): return self.manager.stop() diff --git a/tests/resources/event/__init__.py b/tests/resources/event/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/resources/event/test_event.py b/tests/resources/event/test_event.py new file mode 100644 index 000000000..40423d6dc --- /dev/null +++ b/tests/resources/event/test_event.py @@ -0,0 +1,103 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance 
with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from unittest.mock import Mock + +import pytest + +from hathor.event import EventManager +from hathor.event.resources.event import EventResource +from hathor.event.storage import EventMemoryStorage +from tests.resources.base_resource import StubSite +from tests.utils import EventMocker + + +@pytest.fixture +def web(): + event_storage = EventMemoryStorage() + + for i in range(3): + event = EventMocker.create_event(i) + event_storage.save_event(event) + + event_manager = Mock(spec_set=EventManager) + event_manager.event_storage = event_storage + + return StubSite(EventResource(event_manager)) + + +@pytest.fixture +def data(): + return EventMocker.tx_data.dict() + + +def test_get_events(web, data): + response = web.get('event').result + result = response.json_value() + expected = { + 'events': [ + {'peer_id': '123', 'id': 0, 'timestamp': 123456.0, 'type': 'VERTEX_METADATA_CHANGED', 'data': data, + 'group_id': None}, + {'peer_id': '123', 'id': 1, 'timestamp': 123456.0, 'type': 'VERTEX_METADATA_CHANGED', 'data': data, + 'group_id': None}, + {'peer_id': '123', 'id': 2, 'timestamp': 123456.0, 'type': 'VERTEX_METADATA_CHANGED', 'data': data, + 'group_id': None} + ], + 'latest_event_id': 2 + } + + assert result == expected + + +def test_get_events_with_size(web, data): + response = web.get('event', {b'size': b'1'}) + result = response.result.json_value() + expected = { + 'events': [ + {'peer_id': '123', 'id': 0, 'timestamp': 123456.0, 'type': 'VERTEX_METADATA_CHANGED', 'data': data, + 'group_id': None} + ], + 'latest_event_id': 2 + } + + assert 
result == expected + + +def test_get_events_with_last_ack_event_id(web, data): + response = web.get('event', {b'last_ack_event_id': b'0'}) + result = response.result.json_value() + expected = { + 'events': [ + {'peer_id': '123', 'id': 1, 'timestamp': 123456.0, 'type': 'VERTEX_METADATA_CHANGED', 'data': data, + 'group_id': None}, + {'peer_id': '123', 'id': 2, 'timestamp': 123456.0, 'type': 'VERTEX_METADATA_CHANGED', 'data': data, + 'group_id': None} + ], + 'latest_event_id': 2 + } + + assert result == expected + + +def test_get_events_with_size_and_last_ack_event_id(web, data): + response = web.get('event', {b'last_ack_event_id': b'0', b'size': b'1'}) + result = response.result.json_value() + expected = { + 'events': [ + {'peer_id': '123', 'id': 1, 'timestamp': 123456.0, 'type': 'VERTEX_METADATA_CHANGED', 'data': data, + 'group_id': None}, + ], + 'latest_event_id': 2 + } + + assert result == expected diff --git a/tests/resources/p2p/test_status.py b/tests/resources/p2p/test_status.py index 6e1e215fc..ea80ece6e 100644 --- a/tests/resources/p2p/test_status.py +++ b/tests/resources/p2p/test_status.py @@ -2,6 +2,7 @@ from twisted.internet.defer import inlineCallbacks import hathor +from hathor.conf.unittests import SETTINGS from hathor.p2p.resources import StatusResource from hathor.simulator import FakeConnection from tests import unittest @@ -22,11 +23,32 @@ def setUp(self): def test_get(self): response = yield self.web.get("status") data = response.json_value() + server_data = data.get('server') self.assertEqual(server_data['app_version'], 'Hathor v{}'.format(hathor.__version__)) self.assertEqual(server_data['network'], 'testnet') self.assertGreater(server_data['uptime'], 0) + dag_data = data.get('dag') + # We have the genesis block + self.assertEqual(len(dag_data['best_block_tips']), 1) + self.assertIsNotNone(dag_data['best_block_tips'][0]) + # As we don't have a type, we must check if the keys are there, + # and the types are correct + self.assertIn('hash', 
dag_data['best_block_tips'][0]) + self.assertIn('height', dag_data['best_block_tips'][0]) + self.assertIsInstance(dag_data['best_block_tips'][0]['hash'], str) + self.assertIsInstance(dag_data['best_block_tips'][0]['height'], int) + self.assertEqual(dag_data['best_block_tips'][0]['hash'], SETTINGS.GENESIS_BLOCK_HASH.hex()) + self.assertEqual(dag_data['best_block_tips'][0]['height'], 0) + self.assertIsNotNone(dag_data['best_block']) + self.assertIn('hash', dag_data['best_block']) + self.assertIn('height', dag_data['best_block']) + self.assertIsInstance(dag_data['best_block']['hash'], str) + self.assertIsInstance(dag_data['best_block']['height'], int) + self.assertEqual(dag_data['best_block']['hash'], SETTINGS.GENESIS_BLOCK_HASH.hex()) + self.assertEqual(dag_data['best_block']['height'], 0) + @inlineCallbacks def test_handshaking(self): response = yield self.web.get("status") diff --git a/tests/resources/test_version.py b/tests/resources/test_version.py index f353e9ed1..f5c5e121d 100644 --- a/tests/resources/test_version.py +++ b/tests/resources/test_version.py @@ -1,6 +1,12 @@ +import shutil +import subprocess +import tempfile +from unittest.mock import patch + from twisted.internet.defer import inlineCallbacks import hathor +from hathor.version import BASE_VERSION, DEFAULT_VERSION_SUFFIX, _get_version from hathor.version_resource import VersionResource from tests import unittest from tests.resources.base_resource import StubSite, _BaseResourceTest @@ -12,6 +18,11 @@ class BaseVersionTest(_BaseResourceTest._ResourceTest): def setUp(self): super().setUp() self.web = StubSite(VersionResource(self.manager)) + self.tmp_dir = tempfile.mkdtemp() + + def tearDown(self): + super().tearDown() + shutil.rmtree(self.tmp_dir) @inlineCallbacks def test_get(self): @@ -19,6 +30,50 @@ def test_get(self): data = response.json_value() self.assertEqual(data['version'], hathor.__version__) + def test_local_version(self): + """Test that we will return a version with the default prefix 
when the BUILD_VERSION file + does not exist. + """ + with patch('hathor.version.BUILD_VERSION_FILE_PATH', self.tmp_dir + '/BUILD_VERSION'): + git_head = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD']).decode('ascii').strip() + self.assertEqual(_get_version(), f"{BASE_VERSION}-{git_head}-{DEFAULT_VERSION_SUFFIX}") + + def test_build_version(self): + """Test that we will return the version from the BUILD_VERSION file if it is valid, + or the local version if the BUILD_VERSION is invalid. + """ + file_path = self.tmp_dir + '/BUILD_VERSION' + + with patch('hathor.version.BUILD_VERSION_FILE_PATH', file_path): + # Valid BUILD_VERSION files + with open(file_path, 'w') as build_version_file: + build_version_file.write(BASE_VERSION) + self.assertEqual(_get_version(), BASE_VERSION) + + with open(file_path, 'w') as build_version_file: + build_version_file.write(BASE_VERSION + '-rc.1') + self.assertEqual(_get_version(), BASE_VERSION + '-rc.1') + + with open(file_path, 'w') as build_version_file: + build_version_file.write('nightly-a4b3f9c2') + self.assertEqual(_get_version(), 'nightly-a4b3f9c2') + + # BUILD_VERSION with white spaces + with open(file_path, 'w') as build_version_file: + build_version_file.write(' ' + BASE_VERSION + '-rc.1 ') + self.assertEqual(_get_version(), BASE_VERSION + '-rc.1') + + # Invalid BUILD_VERSION files + git_head = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD']).decode('ascii').strip() + + with open(file_path, 'w') as build_version_file: + build_version_file.write('v1.2.3') + self.assertEqual(_get_version(), f"{BASE_VERSION}-{git_head}-{DEFAULT_VERSION_SUFFIX}") + + with open(file_path, 'w') as build_version_file: + build_version_file.write('1.2.3-beta') + self.assertEqual(_get_version(), f"{BASE_VERSION}-{git_head}-{DEFAULT_VERSION_SUFFIX}") + class SyncV1VersionTest(unittest.SyncV1Params, BaseVersionTest): __test__ = True diff --git a/tests/resources/transaction/test_utxo_search.py 
b/tests/resources/transaction/test_utxo_search.py index b177b0730..57e4aea28 100644 --- a/tests/resources/transaction/test_utxo_search.py +++ b/tests/resources/transaction/test_utxo_search.py @@ -14,16 +14,10 @@ class BaseUtxoSearchTest(_BaseResourceTest._ResourceTest): __test__ = False def setUp(self): - super().setUp() + super().setUp(utxo_index=True) self.web = StubSite(UtxoSearchResource(self.manager)) self.manager.wallet.unlock(b'MYPASS') - def _manager_kwargs(self): - # TODO: when we are in Python 3.9+ we could use `return super()._manger_kwargs() | {'utxo_index': True}` - kwargs = super()._manager_kwargs() - kwargs.update(utxo_index=True) - return kwargs - @inlineCallbacks def test_simple_gets(self): address = self.get_address(0).encode('ascii') diff --git a/tests/resources/wallet/test_lock.py b/tests/resources/wallet/test_lock.py index f9819ce04..d4ec2f404 100644 --- a/tests/resources/wallet/test_lock.py +++ b/tests/resources/wallet/test_lock.py @@ -9,7 +9,7 @@ class BaseLockTest(_BaseResourceTest._ResourceTest): __test__ = False def setUp(self): - super().setUp() + super().setUp(unlock_wallet=False) self.web = StubSite(LockWalletResource(self.manager)) self.web_unlock = StubSite(UnlockWalletResource(self.manager)) self.web_state = StubSite(StateWalletResource(self.manager)) diff --git a/tests/resources/wallet/test_unlock.py b/tests/resources/wallet/test_unlock.py index 365931320..246ad1919 100644 --- a/tests/resources/wallet/test_unlock.py +++ b/tests/resources/wallet/test_unlock.py @@ -10,7 +10,7 @@ class BaseUnlockTest(_BaseResourceTest._ResourceTest): __test__ = False def setUp(self): - super().setUp() + super().setUp(unlock_wallet=False) self.web = StubSite(UnlockWalletResource(self.manager)) self.web_lock = StubSite(LockWalletResource(self.manager)) self.web_state = StubSite(StateWalletResource(self.manager)) diff --git a/tests/simulation/test_trigger.py b/tests/simulation/test_trigger.py new file mode 100644 index 000000000..572d1a319 --- /dev/null 
+++ b/tests/simulation/test_trigger.py @@ -0,0 +1,62 @@ +from hathor.p2p.peer_id import PeerId +from hathor.simulator import Simulator +from hathor.simulator.trigger import StopAfterMinimumBalance, StopAfterNMinedBlocks +from tests import unittest + + +class TriggerTestCase(unittest.TestCase): + def setUp(self): + super().setUp() + + self.simulator = Simulator() + self.simulator.start() + + peer_id = PeerId() + self.manager1 = self.simulator.create_peer(peer_id=peer_id) + self.manager1.allow_mining_without_peers() + + print('-' * 30) + print('Simulation seed config:', self.simulator.seed) + print('-' * 30) + + def tearDown(self): + super().tearDown() + self.simulator.stop() + + def test_stop_after_n_mined_blocks(self): + miner1 = self.simulator.create_miner(self.manager1, hashpower=1e6) + miner1.start() + + reactor = self.simulator.get_reactor() + + t0 = reactor.seconds() + trigger = StopAfterNMinedBlocks(miner1, quantity=3) + self.assertEqual(miner1.get_blocks_found(), 0) + self.assertTrue(self.simulator.run(3600, trigger=trigger)) + self.assertEqual(miner1.get_blocks_found(), 3) + self.assertLess(reactor.seconds(), t0 + 3600) + + trigger.reset() + self.assertTrue(self.simulator.run(3600, trigger=trigger)) + self.assertEqual(miner1.get_blocks_found(), 6) + + t0 = reactor.seconds() + trigger = StopAfterNMinedBlocks(miner1, quantity=10) + self.assertTrue(self.simulator.run(3600, trigger=trigger)) + self.assertEqual(miner1.get_blocks_found(), 16) + self.assertLess(reactor.seconds(), t0 + 3600) + + def test_stop_after_minimum_balance(self): + miner1 = self.simulator.create_miner(self.manager1, hashpower=1e6) + miner1.start() + + wallet = self.manager1.wallet + settings = self.simulator.settings + + minimum_balance = 1000_00 # 16 blocks + token_uid = settings.HATHOR_TOKEN_UID + + trigger = StopAfterMinimumBalance(wallet, token_uid, minimum_balance) + self.assertLess(wallet.balance[token_uid].available, minimum_balance) + self.assertTrue(self.simulator.run(3600, 
trigger=trigger)) + self.assertGreaterEqual(wallet.balance[token_uid].available, minimum_balance) diff --git a/tests/sysctl/__init__.py b/tests/sysctl/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/sysctl/test_p2p.py b/tests/sysctl/test_p2p.py new file mode 100644 index 000000000..bd7216e0b --- /dev/null +++ b/tests/sysctl/test_p2p.py @@ -0,0 +1,137 @@ +import os +import tempfile +from unittest.mock import MagicMock + +from hathor.sysctl import ConnectionsManagerSysctl +from hathor.sysctl.exception import SysctlException +from tests import unittest +from tests.simulation.base import SimulatorTestCase + + +class BaseRandomSimulatorTestCase(SimulatorTestCase): + def test_max_enabled_sync(self): + manager = self.create_peer() + connections = manager.connections + sysctl = ConnectionsManagerSysctl(connections) + + connections._sync_rotate_if_needed = MagicMock() + self.assertEqual(connections._sync_rotate_if_needed.call_count, 0) + + sysctl.set('max_enabled_sync', 10) + self.assertEqual(connections._sync_rotate_if_needed.call_count, 1) + self.assertEqual(connections.MAX_ENABLED_SYNC, 10) + self.assertEqual(sysctl.get('max_enabled_sync'), 10) + + sysctl.set('max_enabled_sync', 10) + self.assertEqual(connections._sync_rotate_if_needed.call_count, 1) + self.assertEqual(connections.MAX_ENABLED_SYNC, 10) + self.assertEqual(sysctl.get('max_enabled_sync'), 10) + + sysctl.set('max_enabled_sync', 5) + self.assertEqual(connections._sync_rotate_if_needed.call_count, 2) + self.assertEqual(connections.MAX_ENABLED_SYNC, 5) + self.assertEqual(sysctl.get('max_enabled_sync'), 5) + + sysctl.set('max_enabled_sync', 0) + self.assertEqual(connections._sync_rotate_if_needed.call_count, 3) + self.assertEqual(connections.MAX_ENABLED_SYNC, 0) + self.assertEqual(sysctl.get('max_enabled_sync'), 0) + + with self.assertRaises(SysctlException): + sysctl.set('max_enabled_sync', -1) + + def test_global_rate_limiter_send_tips(self): + manager = self.create_peer() + 
connections = manager.connections + sysctl = ConnectionsManagerSysctl(connections) + + path = 'rate_limit.global.send_tips' + + sysctl.set(path, (10, 4)) + limit = connections.rate_limiter.get_limit(connections.GlobalRateLimiter.SEND_TIPS) + self.assertEqual(limit, (10, 4)) + self.assertEqual(sysctl.get(path), (10, 4)) + + sysctl.set(path, (15, 5)) + limit = connections.rate_limiter.get_limit(connections.GlobalRateLimiter.SEND_TIPS) + self.assertEqual(limit, (15, 5)) + self.assertEqual(sysctl.get(path), (15, 5)) + + sysctl.set(path, (0, 0)) + limit = connections.rate_limiter.get_limit(connections.GlobalRateLimiter.SEND_TIPS) + self.assertEqual(limit, None) + self.assertEqual(sysctl.get(path), (0, 0)) + + with self.assertRaises(SysctlException): + sysctl.set(path, (-1, 1)) + + with self.assertRaises(SysctlException): + sysctl.set(path, (1, -1)) + + with self.assertRaises(SysctlException): + sysctl.set(path, (-1, -1)) + + def test_force_sync_rotate(self): + manager = self.create_peer() + connections = manager.connections + sysctl = ConnectionsManagerSysctl(connections) + + connections._sync_rotate_if_needed = MagicMock() + self.assertEqual(connections._sync_rotate_if_needed.call_count, 0) + + sysctl.set('force_sync_rotate', ()) + self.assertEqual(connections._sync_rotate_if_needed.call_count, 1) + self.assertEqual(connections._sync_rotate_if_needed.call_args.kwargs, {'force': True}) + + def test_sync_update_interval(self): + manager = self.create_peer() + connections = manager.connections + sysctl = ConnectionsManagerSysctl(connections) + + sysctl.set('sync_update_interval', 10) + self.assertEqual(connections.lc_sync_update_interval, 10) + self.assertEqual(sysctl.get('sync_update_interval'), 10) + + with self.assertRaises(SysctlException): + sysctl.set('sync_update_interval', -1) + + def test_always_enable_sync(self): + manager = self.create_peer() + connections = manager.connections + sysctl = ConnectionsManagerSysctl(connections) + + 
sysctl.set('always_enable_sync', ['peer-1', 'peer-2']) + self.assertEqual(connections.always_enable_sync, {'peer-1', 'peer-2'}) + self.assertEqual(set(sysctl.get('always_enable_sync')), {'peer-1', 'peer-2'}) + + sysctl.set('always_enable_sync', []) + self.assertEqual(connections.always_enable_sync, set()) + self.assertEqual(sysctl.get('always_enable_sync'), []) + + with tempfile.TemporaryDirectory() as dir_path: + content = [ + 'peer-id-1', + 'peer-id-2', + ] + + file_path = os.path.join(dir_path, 'a.txt') + fp = open(file_path, 'w') + fp.write('\n'.join(content)) + fp.close() + + sysctl.set('always_enable_sync.readtxt', file_path) + self.assertEqual(connections.always_enable_sync, set(content)) + self.assertEqual(set(sysctl.get('always_enable_sync')), set(content)) + + +class SyncV1RandomSimulatorTestCase(unittest.SyncV1Params, BaseRandomSimulatorTestCase): + __test__ = True + + +class SyncV2RandomSimulatorTestCase(unittest.SyncV2Params, BaseRandomSimulatorTestCase): + __test__ = True + + +# sync-bridge should behave like sync-v2 +class SyncBridgeRandomSimulatorTestCase(unittest.SyncBridgeParams, SyncV2RandomSimulatorTestCase): + __test__ = True diff --git a/tests/sysctl/test_sysctl.py b/tests/sysctl/test_sysctl.py new file mode 100644 index 000000000..ead97371c --- /dev/null +++ b/tests/sysctl/test_sysctl.py @@ -0,0 +1,230 @@ +from typing import cast +from unittest.mock import MagicMock, patch + +from twisted.test import proto_helpers + +from hathor.sysctl import Sysctl +from hathor.sysctl.exception import SysctlEntryNotFound, SysctlReadOnlyEntry, SysctlWriteOnlyEntry +from hathor.sysctl.factory import SysctlFactory +from hathor.sysctl.sysctl import SysctlCommand +from tests import unittest + + +class SysctlTest(unittest.TestCase): + # We need this patch because pydantic.validate_arguments fails when it gets a mock function. 
+ @patch('hathor.sysctl.sysctl.validate_arguments', new=lambda x: x) + def setUp(self) -> None: + super().setUp() + + net = Sysctl() + net.register( + 'max_connections', + MagicMock(return_value=3), # int + MagicMock(), + ) + net.register( + 'readonly', + MagicMock(return_value=0.25), # float + None, + ) + net.register( + 'rate_limit', + MagicMock(return_value=(4, 1)), # Tuple[int, float] + MagicMock(), + ) + core = Sysctl() + core.register( + 'loglevel', + MagicMock(return_value='info'), # str + MagicMock(), + ) + core.register( + 'writeonly', + None, + MagicMock(), # int + ) + + multi = Sysctl() + multi.register( + 'useless', + None, + None, + ) + + self.root = Sysctl() + self.root.put_child('net', net) + self.root.put_child('core', core) + self.root.put_child('ab.bc.cd', multi) + + factory = SysctlFactory(self.root) + self.proto = factory.buildProtocol(('127.0.0.1', 0)) + self.tr = proto_helpers.StringTransport() + self.proto.makeConnection(self.tr) + + ############## + # Get + ############## + + def test_get_int(self) -> None: + self.assertEqual(3, self.root.get('net.max_connections')) + + def test_get_str(self) -> None: + self.assertEqual('info', self.root.get('core.loglevel')) + + def test_get_readonly(self) -> None: + self.assertEqual(0.25, self.root.get('net.readonly')) + + def test_get_tuple(self) -> None: + self.assertEqual((4, 1), self.root.get('net.rate_limit')) + + def test_get_unknown(self) -> None: + with self.assertRaises(SysctlEntryNotFound): + self.root.get('net.unknown') + + def test_get_writeonly(self) -> None: + with self.assertRaises(SysctlWriteOnlyEntry): + self.root.get('core.writeonly') + + ############## + # Set + ############## + + def test_set_int(self) -> None: + self.root.set('net.max_connections', 3) + setter = cast(MagicMock, self.root._get_setter('net.max_connections')) + self.assertEqual(1, setter.call_count) + self.assertEqual((3,), setter.call_args.args) + + def test_set_str(self) -> None: + self.root.set('core.loglevel', 
'debug') + setter = cast(MagicMock, self.root._get_setter('core.loglevel')) + self.assertEqual(1, setter.call_count) + self.assertEqual(('debug',), setter.call_args.args) + + def test_set_readonly(self) -> None: + with self.assertRaises(SysctlReadOnlyEntry): + self.root.set('net.readonly', 0.50) + + def test_set_tuple(self) -> None: + self.root.set('net.rate_limit', (8, 2)) + setter = cast(MagicMock, self.root._get_setter('net.rate_limit')) + self.assertEqual(1, setter.call_count) + self.assertEqual((8, 2), setter.call_args.args) + + def test_set_unknown(self) -> None: + with self.assertRaises(SysctlEntryNotFound): + self.root.set('net.unknown', 1) + + def test_set_writeonly(self) -> None: + self.root.set('core.writeonly', 1) + setter = cast(MagicMock, self.root._get_setter('core.writeonly')) + self.assertEqual(1, setter.call_count) + self.assertEqual((1,), setter.call_args.args) + + ############## + # Others + ############## + + def test_get_command(self) -> None: + cmd = self.root.get_command('ab.bc.cd.useless') + self.assertEqual(cmd, SysctlCommand(None, None)) + + with self.assertRaises(SysctlEntryNotFound): + cmd = self.root.get_command('ab.bc.c.useless') + + def test_get_all(self) -> None: + all_items = set(self.root.get_all()) + self.assertEqual(all_items, { + ('net.max_connections', 3), + ('core.loglevel', 'info'), + ('net.rate_limit', (4, 1)), + ('net.readonly', 0.25), + }) + + ################## + # Protocol: Get + ################## + + def test_proto_get_int(self) -> None: + self.proto.lineReceived(b'net.max_connections') + self.assertEqual(b'3\n', self.tr.value()) + + def test_proto_get_str(self) -> None: + self.proto.lineReceived(b'core.loglevel') + self.assertEqual(b'"info"\n', self.tr.value()) + + def test_proto_get_tuple(self) -> None: + self.proto.lineReceived(b'net.rate_limit') + self.assertEqual(b'4, 1\n', self.tr.value()) + + def test_proto_get_unknown(self) -> None: + self.proto.lineReceived(b'net.unknown') + self.assertEqual(b'[error] 
net.unknown not found\n', self.tr.value()) + + def test_proto_get_readonly(self) -> None: + self.proto.lineReceived(b'net.readonly') + self.assertEqual(b'0.25\n', self.tr.value()) + + def test_proto_get_writeonly(self) -> None: + self.proto.lineReceived(b'core.writeonly') + self.assertEqual(b'[error] cannot read from core.writeonly\n', self.tr.value()) + + ################## + # Protocol: Set + ################## + + def test_proto_set_int(self) -> None: + self.proto.lineReceived(b'net.max_connections=3') + setter = cast(MagicMock, self.root._get_setter('net.max_connections')) + self.assertEqual(1, setter.call_count) + self.assertEqual((3,), setter.call_args.args) + + def test_proto_set_str(self) -> None: + self.proto.lineReceived(b'core.loglevel="debug"') + setter = cast(MagicMock, self.root._get_setter('core.loglevel')) + self.assertEqual(1, setter.call_count) + self.assertEqual(('debug',), setter.call_args.args) + + def test_proto_set_readonly(self) -> None: + self.proto.lineReceived(b'net.readonly=0.50') + self.assertEqual(b'[error] cannot write to net.readonly\n', self.tr.value()) + + def test_proto_set_unknown(self) -> None: + self.proto.lineReceived(b'net.unknown=0.50') + self.assertEqual(b'[error] net.unknown not found\n', self.tr.value()) + + def test_proto_set_tuple(self) -> None: + self.proto.lineReceived(b'net.rate_limit=8, 2') + setter = cast(MagicMock, self.root._get_setter('net.rate_limit')) + self.assertEqual(1, setter.call_count) + self.assertEqual((8, 2), setter.call_args.args) + + def test_proto_set_writeonly(self) -> None: + self.proto.lineReceived(b'core.writeonly=1') + setter = cast(MagicMock, self.root._get_setter('core.writeonly')) + self.assertEqual(1, setter.call_count) + self.assertEqual((1,), setter.call_args.args) + + def test_set_invalid_value(self) -> None: + self.proto.lineReceived(b'net.max_connections=(3') + self.assertEqual(b'[error] value: wrong format\n', self.tr.value()) + + def test_set_invalid_json(self) -> None: + 
self.proto.lineReceived(b'net.max_connections=\'a\'') + self.assertEqual(b'[error] value: wrong format\n', self.tr.value()) + + ################## + # Protocol: Others + ################## + + def test_proto_backup(self) -> None: + self.proto.lineReceived(b'!backup') + output = self.tr.value() + lines = set(output.split(b'\n')) + self.assertEqual(lines, { + b'core.loglevel="info"', + b'net.max_connections=3', + b'net.rate_limit=4, 1', + b'net.readonly=0.25', + b'', # output ends with a new line (\n) + }) diff --git a/tests/test_utils/__init__.py b/tests/test_utils/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/test_utils/test_api.py b/tests/test_utils/test_api.py new file mode 100644 index 000000000..69021b6a0 --- /dev/null +++ b/tests/test_utils/test_api.py @@ -0,0 +1,55 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from unittest.mock import Mock + +from hathor.utils.api import ErrorResponse, QueryParams + + +def test_query_params_from_request(): + request = Mock() + request.requestHeaders.getRawHeaders = Mock(return_value=None) + request.args = {b'a': [b'abc'], b'b': [b'123']} + result = DummyQueryParams.from_request(request) + + assert isinstance(result, DummyQueryParams) + assert result.a == 'abc' + assert result.b == 123 + + +def test_query_params_from_request_with_encoding(): + request = Mock() + request.requestHeaders.getRawHeaders = Mock(return_value=['application/json; charset=utf-16']) + request.args = { + 'a'.encode('utf-16'): ['abc'.encode('utf-16')], + 'b'.encode('utf-16'): ['123'.encode('utf-16')] + } + result = DummyQueryParams.from_request(request) + + assert isinstance(result, DummyQueryParams) + assert result.a == 'abc' + assert result.b == 123 + + +def test_query_params_from_request_invalid(): + request = Mock() + request.requestHeaders.getRawHeaders = Mock(return_value=None) + request.args = {b'a': [b'abc'], b'b': [b'123', b'456']} + result = DummyQueryParams.from_request(request) + + assert isinstance(result, ErrorResponse) + + +class DummyQueryParams(QueryParams): + a: str + b: int diff --git a/tests/test_utils/test_list.py b/tests/test_utils/test_list.py new file mode 100644 index 000000000..8d920c012 --- /dev/null +++ b/tests/test_utils/test_list.py @@ -0,0 +1,36 @@ +# Copyright 2023 Hathor Labs +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import pytest + +from hathor.utils.list import single_or_none + + +def test_single_or_none_empty(): + result = single_or_none([]) + + assert result is None + + +@pytest.mark.parametrize('value', [None, 1, 10.4, 'test', b'test']) +def test_single_or_none_one(value): + result = single_or_none([value]) + + assert result == value + + +def test_single_or_none_more_than_one(): + with pytest.raises(AssertionError) as exc_info: + single_or_none([1, 2, 3]) + + assert exc_info.value.args[0] == 'expected one value at most' diff --git a/tests/tx/test_indexes.py b/tests/tx/test_indexes.py index c2f8d62a6..d3b1edb72 100644 --- a/tests/tx/test_indexes.py +++ b/tests/tx/test_indexes.py @@ -904,7 +904,7 @@ class SyncV1MemoryIndexesTest(unittest.SyncV1Params, BaseMemoryIndexesTest): class SyncV2MemoryIndexesTest(unittest.SyncV2Params, BaseMemoryIndexesTest): __test__ = True - def test_deps_index(self): + def test_deps_index(self) -> None: from hathor.indexes.memory_deps_index import MemoryDepsIndex add_new_blocks(self.manager, 5, advance_clock=15) @@ -949,11 +949,11 @@ class SyncV1RocksDBIndexesTest(unittest.SyncV1Params, BaseRocksDBIndexesTest): class SyncV2RocksDBIndexesTest(unittest.SyncV2Params, BaseRocksDBIndexesTest): __test__ = True - def test_deps_index(self): + def test_deps_index(self) -> None: from hathor.indexes.rocksdb_deps_index import RocksDBDepsIndex indexes = self.manager.tx_storage.indexes - deps_index = indexes.deps = RocksDBDepsIndex(indexes._db, _force=True) + indexes.deps = RocksDBDepsIndex(indexes._db, _force=True) add_new_blocks(self.manager, 5, advance_clock=15) add_blocks_unlock_reward(self.manager) diff --git a/tests/tx/test_indexes2.py b/tests/tx/test_indexes2.py index d53212b0e..ea8aea4ea 100644 --- a/tests/tx/test_indexes2.py +++ b/tests/tx/test_indexes2.py @@ -44,9 +44,9 @@ def test_timestamp_index(self): # setup two indexes with different backends from hathor.indexes.memory_timestamp_index import MemoryTimestampIndex from 
hathor.indexes.rocksdb_timestamp_index import RocksDBTimestampIndex - from hathor.indexes.timestamp_index import RangeIdx - rocksdb_index = RocksDBTimestampIndex(self.create_tmp_rocksdb_db(), 'foo') - memory_index = MemoryTimestampIndex() + from hathor.indexes.timestamp_index import RangeIdx, ScopeType + rocksdb_index = RocksDBTimestampIndex(self.create_tmp_rocksdb_db(), scope_type=ScopeType.ALL) + memory_index = MemoryTimestampIndex(scope_type=ScopeType.ALL) for tx in self.transactions: rocksdb_index.add_tx(tx) memory_index.add_tx(tx) diff --git a/tests/tx/test_stratum.py b/tests/tx/test_stratum.py index dd4e531f7..0769c10af 100644 --- a/tests/tx/test_stratum.py +++ b/tests/tx/test_stratum.py @@ -38,7 +38,8 @@ def setUp(self): super().setUp() self.manager = self.create_peer('testnet') self.manager.allow_mining_without_peers() - self.factory = StratumFactory(self.manager, port=8123, reactor=MemoryReactorHeapClock()) + port = self.rng.randint(8000, 9000) + self.factory = StratumFactory(self.manager, port=port, reactor=MemoryReactorHeapClock()) self.factory.start() self.protocol = self.factory.buildProtocol('127.0.0.1') self.transport = StringTransportWithDisconnection() diff --git a/tests/tx/test_tx_storage.py b/tests/tx/test_tx_storage.py index bc626607e..bd77f2028 100644 --- a/tests/tx/test_tx_storage.py +++ b/tests/tx/test_tx_storage.py @@ -17,7 +17,7 @@ from hathor.transaction.storage import TransactionCacheStorage, TransactionMemoryStorage, TransactionRocksDBStorage from hathor.transaction.storage.exceptions import TransactionDoesNotExist from hathor.transaction.transaction_metadata import ValidationState -from hathor.wallet import Wallet +from tests.unittest import TestBuilder from tests.utils import ( BURN_ADDRESS, HAS_ROCKSDB, @@ -36,30 +36,33 @@ class BaseTransactionStorageTest(unittest.TestCase): __test__ = False def setUp(self, tx_storage, reactor=None): - from hathor.manager import HathorManager + self.tmpdir = tempfile.mkdtemp() + + builder = 
TestBuilder() + builder.set_tx_storage(tx_storage) + builder.enable_keypair_wallet(self.tmpdir, unlock=b'teste') + builder.enable_address_index() + builder.enable_tokens_index() + if reactor is not None: + builder.set_reactor(reactor) + + artifacts = builder.build() + self.reactor = artifacts.reactor + self.pubsub = artifacts.pubsub + self.manager = artifacts.manager + self.tx_storage = artifacts.tx_storage + + assert artifacts.wallet is not None - if not reactor: - self.reactor = MemoryReactorHeapClock() - else: - self.reactor = reactor self.reactor.advance(time.time()) - self.tx_storage = tx_storage - tx_storage._manually_initialize() + self.tx_storage._manually_initialize() assert tx_storage.first_timestamp > 0 self.genesis = self.tx_storage.get_all_genesis() self.genesis_blocks = [tx for tx in self.genesis if tx.is_block] self.genesis_txs = [tx for tx in self.genesis if not tx.is_block] - self.tmpdir = tempfile.mkdtemp() - wallet = Wallet(directory=self.tmpdir) - wallet.unlock(b'teste') - self.manager = HathorManager(self.reactor, tx_storage=self.tx_storage, wallet=wallet) - - self.tx_storage.indexes.enable_address_index(self.manager.pubsub) - self.tx_storage.indexes.enable_tokens_index() - block_parents = [tx.hash for tx in chain(self.genesis_blocks, self.genesis_txs)] output = TxOutput(200, P2PKH.create_output_script(BURN_ADDRESS)) self.block = Block(timestamp=MIN_TIMESTAMP, weight=12, outputs=[output], parents=block_parents, @@ -193,7 +196,7 @@ def validate_save(self, obj): self.assertEqual(obj.is_block, loaded_obj1.is_block) # Testing add and remove from cache - if self.tx_storage.with_index: + if self.tx_storage.indexes is not None: if obj.is_block: self.assertTrue(obj.hash in self.tx_storage.indexes.block_tips.tx_last_interval) else: @@ -201,14 +204,14 @@ def validate_save(self, obj): self.tx_storage.del_from_indexes(obj) - if self.tx_storage.with_index: + if self.tx_storage.indexes is not None: if obj.is_block: self.assertFalse(obj.hash in 
self.tx_storage.indexes.block_tips.tx_last_interval) else: self.assertFalse(obj.hash in self.tx_storage.indexes.tx_tips.tx_last_interval) self.tx_storage.add_to_indexes(obj) - if self.tx_storage.with_index: + if self.tx_storage.indexes is not None: if obj.is_block: self.assertTrue(obj.hash in self.tx_storage.indexes.block_tips.tx_last_interval) else: diff --git a/tests/unittest.py b/tests/unittest.py index 0ae39e8d1..65d8f2bd9 100644 --- a/tests/unittest.py +++ b/tests/unittest.py @@ -9,14 +9,14 @@ from twisted.internet.task import Clock from twisted.trial import unittest +from hathor.builder import BuildArtifacts, Builder from hathor.conf import HathorSettings from hathor.daa import TestMode, _set_test_mode -from hathor.manager import HathorManager from hathor.p2p.peer_id import PeerId from hathor.p2p.sync_version import SyncVersion -from hathor.storage.rocksdb_storage import RocksDBStorage +from hathor.simulator.clock import MemoryReactorHeapClock from hathor.transaction import BaseTransaction -from hathor.util import Random, get_environment_info, reactor +from hathor.util import Random, Reactor, reactor from hathor.wallet import HDWallet, Wallet logger = get_logger() @@ -30,14 +30,26 @@ def shorten_hash(container): return container_type(h[-2:].hex() for h in container) -def _load_peer_id_pool(file_path: str = 'tests/peer_id_pool.json') -> Iterator[PeerId]: +def _load_peer_id_pool(file_path: Optional[str] = None) -> Iterator[PeerId]: import json + + if file_path is None: + file_path = _get_default_peer_id_pool_filepath() + with open(file_path) as peer_id_pool_file: peer_id_pool_dict = json.load(peer_id_pool_file) for peer_id_dict in peer_id_pool_dict: yield PeerId.create_from_json(peer_id_dict) +def _get_default_peer_id_pool_filepath(): + this_file_path = os.path.dirname(__file__) + file_name = 'peer_id_pool.json' + file_path = os.path.join(this_file_path, file_name) + + return file_path + + PEER_ID_POOL = list(_load_peer_id_pool()) # XXX: Sync*Params classes 
should be inherited before the TestCase class when a sync version is needed @@ -58,6 +70,29 @@ class SyncBridgeParams: _enable_sync_v2 = True +class TestBuilder(Builder): + def __init__(self) -> None: + super().__init__() + self.set_network('testnet') + + def build(self) -> BuildArtifacts: + artifacts = super().build() + # We disable rate limiter by default for tests because most tests were designed + # to run without rate limits. You can enable it in your unittest if you need. + artifacts.manager.connections.disable_rate_limiter() + return artifacts + + def _get_peer_id(self) -> PeerId: + if self._peer_id is not None: + return self._peer_id + return PeerId() + + def _get_reactor(self) -> Reactor: + if self._reactor: + return self._reactor + return MemoryReactorHeapClock() + + class TestCase(unittest.TestCase): _enable_sync_v1: bool _enable_sync_v2: bool @@ -106,8 +141,7 @@ def _create_test_wallet(self): """ Generate a Wallet with a number of keypairs for testing :rtype: Wallet """ - tmpdir = tempfile.mkdtemp() - self.tmpdirs.append(tmpdir) + tmpdir = self.mkdtemp() wallet = Wallet(directory=tmpdir) wallet.unlock(b'MYPASS') @@ -117,7 +151,8 @@ def _create_test_wallet(self): def create_peer(self, network, peer_id=None, wallet=None, tx_storage=None, unlock_wallet=True, wallet_index=False, capabilities=None, full_verification=True, enable_sync_v1=None, enable_sync_v2=None, - checkpoints=None, utxo_index=False, event_storage=None): + checkpoints=None, utxo_index=False, event_manager=None, use_memory_index=None, start_manager=True, + pubsub=None, event_storage=None, event_ws_factory=None): if enable_sync_v1 is None: assert hasattr(self, '_enable_sync_v1'), ('`_enable_sync_v1` has no default by design, either set one on ' 'the test class or pass `enable_sync_v1` by argument') @@ -128,39 +163,71 @@ def create_peer(self, network, peer_id=None, wallet=None, tx_storage=None, unloc enable_sync_v2 = self._enable_sync_v2 assert enable_sync_v1 or enable_sync_v2, 'enable at least 
one sync version' + builder = TestBuilder() \ + .set_rng(self.rng) \ + .set_reactor(self.clock) \ + .set_network(network) \ + .set_full_verification(full_verification) + + if checkpoints is not None: + builder.set_checkpoints(checkpoints) + + if pubsub: + builder.set_pubsub(pubsub) + if peer_id is None: peer_id = PeerId() + builder.set_peer_id(peer_id) + if not wallet: wallet = self._create_test_wallet() if unlock_wallet: wallet.unlock(b'MYPASS') - if tx_storage is None: - if self.use_memory_storage: - from hathor.transaction.storage.memory_storage import TransactionMemoryStorage - tx_storage = TransactionMemoryStorage() - else: - from hathor.transaction.storage.rocksdb_storage import TransactionRocksDBStorage - directory = tempfile.mkdtemp() - self.tmpdirs.append(directory) - rocksdb_storage = RocksDBStorage(path=directory) - self._pending_cleanups.append(rocksdb_storage.close) - tx_storage = TransactionRocksDBStorage(rocksdb_storage) - manager = HathorManager( - self.clock, - peer_id=peer_id, - network=network, - wallet=wallet, - tx_storage=tx_storage, - event_storage=event_storage, - wallet_index=wallet_index, - utxo_index=utxo_index, - capabilities=capabilities, - rng=self.rng, - enable_sync_v1=enable_sync_v1, - enable_sync_v2=enable_sync_v2, - checkpoints=checkpoints, - environment_info=get_environment_info("", peer_id.id) - ) + builder.set_wallet(wallet) + + if event_storage: + builder.set_event_storage(event_storage) + + if event_manager: + builder.set_event_manager(event_manager) + + if event_ws_factory: + builder.enable_event_manager(event_ws_factory=event_ws_factory) + + if tx_storage is not None: + builder.set_tx_storage(tx_storage) + + if self.use_memory_storage: + builder.use_memory() + else: + directory = tempfile.mkdtemp() + self.tmpdirs.append(directory) + builder.use_rocksdb(directory) + + if use_memory_index is True: + builder.force_memory_index() + + if enable_sync_v1 is True: + builder.enable_sync_v1() + elif enable_sync_v1 is False: + 
builder.disable_sync_v1() + + if enable_sync_v2 is True: + builder.enable_sync_v2() + elif enable_sync_v2 is False: + builder.disable_sync_v2() + + if wallet_index: + builder.enable_wallet_index() + + if utxo_index: + builder.enable_utxo_index() + + artifacts = builder.build() + manager = artifacts.manager + + if artifacts.rocksdb_storage: + self._pending_cleanups.append(artifacts.rocksdb_storage.close) # XXX: just making sure that tests set this up correctly if enable_sync_v2: @@ -168,14 +235,17 @@ def create_peer(self, network, peer_id=None, wallet=None, tx_storage=None, unloc else: assert SyncVersion.V2 not in manager.connections._sync_factories if enable_sync_v1: - assert SyncVersion.V1 in manager.connections._sync_factories + assert SyncVersion.V1 not in manager.connections._sync_factories + assert SyncVersion.V1_1 in manager.connections._sync_factories else: assert SyncVersion.V1 not in manager.connections._sync_factories + assert SyncVersion.V1_1 not in manager.connections._sync_factories manager.avg_time_between_blocks = 0.0001 - manager._full_verification = full_verification - manager.start() - self.run_to_completion() + + if start_manager: + manager.start() + self.run_to_completion() return manager def run_to_completion(self): diff --git a/tests/utils.py b/tests/utils.py index 75e05c52f..3d6a77bbd 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -6,7 +6,7 @@ import time import urllib.parse from dataclasses import dataclass -from typing import List, Optional, Tuple, cast +from typing import Iterator, List, Optional, Tuple, TypeVar, cast import requests from hathorlib.scripts import DataScript @@ -14,7 +14,9 @@ from hathor.conf import HathorSettings from hathor.crypto.util import decode_address, get_address_b58_from_public_key, get_private_key_from_bytes -from hathor.event.base_event import BaseEvent +from hathor.event.model.base_event import BaseEvent +from hathor.event.model.event_data import TxData, TxMetadata +from hathor.event.model.event_type 
import EventType from hathor.manager import HathorManager from hathor.transaction import BaseTransaction, Transaction, TxInput, TxOutput, genesis from hathor.transaction.scripts import P2PKH, HathorScript, Opcode, parse_address_script @@ -653,27 +655,49 @@ def add_tx_with_data_script(manager: 'HathorManager', data: List[str], propagate class EventMocker: rng: Random next_id: int = 0 + tx_data = TxData( + hash='abc', + nonce=123, + timestamp=456, + version=1, + weight=10, + inputs=[], + outputs=[], + parents=[], + tokens=[], + metadata=TxMetadata( + hash='abc', + spent_outputs=[], + conflict_with=[], + voided_by=[], + received_by=[], + children=[], + twins=[], + accumulated_weight=10, + score=20, + height=100, + validation='validation' + ) + ) def gen_next_id(self) -> int: next_id = self.next_id self.next_id += 1 return next_id - def generate_mocked_event(self, id: Optional[int] = None) -> BaseEvent: - """ Generates a mocked event with a best block found message + def generate_mocked_event(self, event_id: Optional[int] = None, group_id: Optional[int] = None) -> BaseEvent: + """ Generates a mocked event with the best block found message """ - hash = hashlib.sha256(self.generate_random_word(10).encode('utf-8')) - peer_id_mock = hash.hexdigest() + _hash = hashlib.sha256(self.generate_random_word(10).encode('utf-8')) + peer_id_mock = _hash.hexdigest() return BaseEvent( - id=id or self.gen_next_id(), + id=event_id or self.gen_next_id(), peer_id=peer_id_mock, timestamp=1658892990, - type='network:best_block_found', - group_id=0, - data={ - "data": "test" - }, + type=EventType.VERTEX_METADATA_CHANGED, + group_id=group_id, + data=self.tx_data, ) def generate_random_word(self, length: int) -> str: @@ -681,3 +705,55 @@ def generate_random_word(self, length: int) -> str: """ letters = string.ascii_lowercase return ''.join(self.rng.choice(letters) for i in range(length)) + + @classmethod + def create_event(cls, event_id: int) -> BaseEvent: + """ Generates a mocked event with 
fixed properties, except the ID + """ + return BaseEvent( + peer_id='123', + id=event_id, + timestamp=123456, + type=EventType.VERTEX_METADATA_CHANGED, + data=cls.tx_data + ) + + +T = TypeVar('T') + + +def zip_chunkify(flat_list: List[T], chunked_list: List[List[T]]) -> Iterator[Tuple[List[T], List[T]]]: + """ + Takes two lists, one flat and one chunked. Chunks the first one into chunks of the same size as the second. + Returns a zipped list where each item is a tuple of chunks, one from each list. + + >>> list(zip_chunkify([], [])) + [] + >>> list(zip_chunkify([], [[]])) + [([], [])] + >>> list(zip_chunkify([], [[], []])) + [([], []), ([], [])] + >>> list(zip_chunkify([1], [[2]])) + [([1], [2])] + >>> list(zip_chunkify([1, 1], [[2]])) + Traceback (most recent call last): + ... + ValueError: lists should have the same amount of items + >>> list(zip_chunkify([1], [[2], [2]])) + Traceback (most recent call last): + ... + ValueError: lists should have the same amount of items + >>> list(zip_chunkify([1, 1], [[2], [2]])) + [([1], [2]), ([1], [2])] + >>> list(zip_chunkify([0, 2, 4, 6, 8, 10, 12], [[1], [3, 5], [7], [9, 11, 13]])) + [([0], [1]), ([2, 4], [3, 5]), ([6], [7]), ([8, 10, 12], [9, 11, 13])] + """ + if len(flat_list) != sum(map(len, chunked_list)): + raise ValueError('lists should have the same amount of items') + + flat_iter = iter(flat_list) + + for chunk in chunked_list: + items = [next(flat_iter) for _ in chunk] + + yield items, chunk