diff --git a/.bumpversion.cfg b/.bumpversion.cfg index a9f550e4..44db103a 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.1.0 +current_version = 2.0.0 commit = True tag = True diff --git a/.circleci/config.yml b/.circleci/config.yml index f975809e..03708697 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,27 +1,34 @@ version: 2.1 orbs: - python: circleci/python@0.2.1 + python: circleci/python@1.0.0 win: circleci/windows@2.2.0 workflows: version: 2 build_test_deploy: jobs: - - build + - build: + filters: + branches: + ignore: + - gh-pages - test_integration: requires: - build + filters: + branches: + ignore: + - gh-pages matrix: parameters: - python-version: ["2.7.16", "2.7.18", "3.5.10", "3.6.5", "3.7.7", "3.8.6", "3.9.3", "latest"] + python-version: ["3.6.5", "3.7.7", "3.8.6", "3.9.3", "3.9.6", "3.9.9", "latest"] - hold: type: approval requires: - test_integration - filters: branches: only: @@ -33,22 +40,6 @@ workflows: requires: - hold - # upload_test: - # triggers: - # - schedule: - # cron: "0,30 * * * *" - # filters: - # branches: - # only: - # - jh/use-xxhash-for-integration-test - - # jobs: - # - build - - # - upload_test_job: - # requires: - # - build - jobs: build: docker: @@ -56,13 +47,11 @@ jobs: steps: - checkout: name: Checkout Git - - run: name: Build Package command: | echo -e "Running sdist" python setup.py sdist - - persist_to_workspace: root: /home/circleci/project/ paths: @@ -73,60 +62,32 @@ jobs: parameters: python-version: type: string - docker: - image: circleci/python:<< parameters.python-version >> - steps: - attach_workspace: at: /tmp/artifact name: Attach build artifact - - run: name: Install package command: | pip install --user '/tmp/artifact' - - - run: - name: Run integration test - command: | - python /tmp/artifact/tests/integration.py - - upload_test_job: - description: Upload test - docker: - - image: circleci/python:latest - - steps: - - attach_workspace: - 
at: /tmp/artifact - name: Attach build artifact - - run: - name: Install package - command: | - pip install '/tmp/artifact' - - run: name: Run integration test command: | python /tmp/artifact/tests/integration.py - deploy: docker: - image: circleci/python:latest - steps: - attach_workspace: at: /tmp/artifact name: Attach build artifact - - run: name: Install dependencies command: | pip install setuptools wheel twine - - run: name: init .pypirc command: | @@ -134,7 +95,6 @@ jobs: echo -e "[pypi]" >> ~/.pypirc echo -e "username = $TWINE_USERNAME" >> ~/.pypirc echo -e "password = $TWINE_PASSWORD" >> ~/.pypirc - - run: name: Upload to pypi command: | diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 00000000..7c188d61 --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,13 @@ +## [DEVREL-XXXX] + +### Description: +Please provide a short description of what this PR does + +### Depends on: +- Does this PR depend on any other ones? + +### Includes changes from: +- Does this PR include changes from another PR? + +### I'd like feedback on: +- What would you like feedback on? 
diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml new file mode 100644 index 00000000..994a5013 --- /dev/null +++ b/.github/workflows/documentation.yml @@ -0,0 +1,24 @@ +name: Documentation +on: + push: + branches: + - develop +jobs: + build: + runs-on: ubuntu-latest + steps: + - uses: actions/setup-python@v2 + - uses: actions/checkout@master + with: + fetch-depth: 0 # otherwise, you will fail to push refs to dest repo + ref: develop + - name: Build and Commit + uses: sphinx-notes/pages@v2 + with: + documentation_path: './docs' + requirements_path: './docs/requirements.txt' + - name: Push changes + uses: ad-m/github-push-action@master + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + branch: gh-pages diff --git a/.gitignore b/.gitignore index 6b709839..37bc36ef 100644 --- a/.gitignore +++ b/.gitignore @@ -108,3 +108,6 @@ venv.bak/ Pipfile Pipfile.lock .vscode/launch.json +.vscode/settings.json + +pyproject.toml \ No newline at end of file diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 00000000..de288e1e --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,3 @@ +{ + "python.formatting.provider": "black" +} \ No newline at end of file diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 00000000..162373c5 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,18 @@ +FROM python:3.11-buster as builder + +RUN pip install poetry==1.8.3 + +ENV POETRY_NO_INTERACTION=1 \ + POETRY_VIRTUALENVS_IN_PROJECT=1 \ + POETRY_VIRTUALENVS_CREATE=1 \ + POETRY_CACHE_DIR=/tmp/poetry_cache + +WORKDIR /frameio + +COPY README.md README.md +COPY pyproject.toml poetry.lock ./ +COPY frameioclient frameioclient + +RUN --mount=type=cache,target=$POETRY_CACHE_DIR poetry install --without dev + +ENTRYPOINT [ "poetry", "run", "fiocli" ] diff --git a/Makefile b/Makefile index 3c2e716b..c6126f0e 100644 --- a/Makefile +++ b/Makefile @@ -15,3 +15,24 @@ bump-patch: clean: find . 
-name "*.pyc" -exec rm -f {} \; + +test: + cd tests && pipenv run python integration.py + +package: + pipenv run python3 setup.py sdist bdist_wheel + +build-docker: + docker build . -t benchmark + +run-benchmark: + docker run -it -e $1 benchmark + +format: + black frameioclient + +view-docs: + cd docs && pip install -r requirements.txt && make dev + +publish-docs: + cd docs && pip install -r requirements.txt && make jekyll && make publish diff --git a/README.md b/README.md index 7d9f2af6..d8c4dd94 100644 --- a/README.md +++ b/README.md @@ -24,12 +24,58 @@ $ git clone https://github.com/frameio/python-frameio-client $ pip install . ``` -_Note: The Frame.io Python client may not work correctly in Python 3.8+_ +### Developing +Install the package into your development environment and link to it by running the following: + +```sh +pipenv install -e . -pre +``` ## Documentation [Frame.io API Documentation](https://developer.frame.io/docs) +### Use CLI +When you install this package, a cli tool called `fioctl` will also be installed to your environment. 
+ +**To upload a file or folder** +```sh +fioctl \ +--token fio-u-YOUR_TOKEN_HERE \ +--destination "YOUR TARGET FRAME.IO PROJECT OR FOLDER" \ +--target "YOUR LOCAL SYSTEM DIRECTORY" \ +--threads 8 +``` + +**To download a file, project, or folder** +```sh +fioctl \ +--token fio-u-YOUR_TOKEN_HERE \ +--destination "YOUR LOCAL SYSTEM DIRECTORY" \ +--target "YOUR TARGET FRAME.IO PROJECT OR FOLDER" \ +--threads 2 +``` + +### Links + +**Sphinx Documentation** +- https://pythonhosted.org/sphinxcontrib-restbuilder/ +- https://www.npmjs.com/package/rst-selector-parser +- https://sphinx-themes.org/sample-sites/furo/_sources/index.rst.txt +- https://developer.mantidproject.org/Standards/DocumentationGuideForDevs.html +- https://sublime-and-sphinx-guide.readthedocs.io/en/latest/code_blocks.html +- https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html +- https://stackoverflow.com/questions/64451966/python-sphinx-how-to-embed-code-into-a-docstring +- https://pythonhosted.org/an_example_pypi_project/sphinx.html + +**Decorators** +- https://docs.python.org/3.7/library/functools.html +- https://realpython.com/primer-on-python-decorators/ +- https://www.sphinx-doc.org/en/master/usage/quickstart.html +- https://www.geeksforgeeks.org/decorators-with-parameters-in-python/ +- https://stackoverflow.com/questions/43544954/why-does-sphinx-autodoc-output-a-decorators-docstring-when-there-are-two-decora + + ## Usage _Note: A valid token is required to make requests to Frame.io. Go to our [Developer Portal](https://developer.frame.io/) to get a token!_ diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 00000000..88536142 --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,36 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = . 
+BUILDDIR = dist + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +publish: + python publish.py + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +jekyll: + sphinx-build -b jekyll . dist/markdown + +rst: + sphinx-build -b rst . dist/rst + +html: + sphinx-build -b html . dist/html + +dev: + sphinx-autobuild -b html . _build/html + diff --git a/docs/classes/assets.rst b/docs/classes/assets.rst new file mode 100644 index 00000000..ff78e6d6 --- /dev/null +++ b/docs/classes/assets.rst @@ -0,0 +1,5 @@ +Assets +========================= + +.. autoclass:: frameioclient.Asset + :members: diff --git a/docs/classes/comments.rst b/docs/classes/comments.rst new file mode 100644 index 00000000..ea9f45cc --- /dev/null +++ b/docs/classes/comments.rst @@ -0,0 +1,5 @@ +Comments +=================== + +.. autoclass:: frameioclient.Comment + :members: diff --git a/docs/classes/index.rst b/docs/classes/index.rst new file mode 100644 index 00000000..be5258bf --- /dev/null +++ b/docs/classes/index.rst @@ -0,0 +1,12 @@ +Classes +===================== + +.. toctree:: + users + assets + comments + logs + projects + teams + sharing + search \ No newline at end of file diff --git a/docs/classes/logs.rst b/docs/classes/logs.rst new file mode 100644 index 00000000..098f0fc2 --- /dev/null +++ b/docs/classes/logs.rst @@ -0,0 +1,5 @@ +Audit Logs +=================== + +.. autoclass:: frameioclient.AuditLogs + :members: diff --git a/docs/classes/projects.rst b/docs/classes/projects.rst new file mode 100644 index 00000000..0998acd0 --- /dev/null +++ b/docs/classes/projects.rst @@ -0,0 +1,5 @@ +Projects +=================== + +.. 
autoclass:: frameioclient.Project + :members: diff --git a/docs/classes/search.rst b/docs/classes/search.rst new file mode 100644 index 00000000..67f86fa8 --- /dev/null +++ b/docs/classes/search.rst @@ -0,0 +1,5 @@ +Search +=================== + +.. autoclass:: frameioclient.Search + :members: diff --git a/docs/classes/sharing.rst b/docs/classes/sharing.rst new file mode 100644 index 00000000..3bff7927 --- /dev/null +++ b/docs/classes/sharing.rst @@ -0,0 +1,8 @@ +Sharing +=================== + +.. autoclass:: frameioclient.PresentationLink + :members: + +.. autoclass:: frameioclient.ReviewLink + :members: \ No newline at end of file diff --git a/docs/classes/teams.rst b/docs/classes/teams.rst new file mode 100644 index 00000000..23c34eb9 --- /dev/null +++ b/docs/classes/teams.rst @@ -0,0 +1,5 @@ +Teams +=================== + +.. autoclass:: frameioclient.Team + :members: diff --git a/docs/classes/users.rst b/docs/classes/users.rst new file mode 100644 index 00000000..77ed12d1 --- /dev/null +++ b/docs/classes/users.rst @@ -0,0 +1,5 @@ +Users +=================== + +.. 
autoclass:: frameioclient.User + :members: \ No newline at end of file diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 00000000..73cc4d39 --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,62 @@ +import os +import sys +sys.path.insert(0, os.path.abspath('..')) + +import frameioclient + +PACKAGE_TITLE = 'Frame.io Python SDK' +PACKAGE_NAME = 'frameioclient' +PACKAGE_DIR = '../frameioclient' +AUTHOR_NAME = 'Frame.io' + +try: + RELEASE = frameioclient.ClientVersion.version() +except AttributeError: + RELEASE = 'unknown' + +version = RELEASE.split('.')[0] + +# -- Project information ----------------------------------------------------- + +project = PACKAGE_TITLE +copyright = 'MIT License 2022, Frame.io' +author = AUTHOR_NAME + +# The full version, including alpha/beta/rc tags +release = RELEASE + + +# -- General configuration --------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.napoleon', + 'sphinx.ext.githubpages', + 'sphinxcontrib.restbuilder', + 'sphinx_jekyll_builder', + 'sphinx_autodoc_typehints' +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', 'build/*', 'examples/*', 'tests/*', '*.cfg', '.vscode/*', '.github/*', '.circleci/*', '.pytest_cache/*', 'dist/*'] + + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. 
+# +html_theme = 'furo' + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 00000000..9da0551d --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,35 @@ +Welcome to Frame.io's Python SDK documentation! +=============================================== + +.. toctree:: + :maxdepth: 3 + :caption: Contents: + +.. warning:: + This sample documentation was generated on |today|, and is rebuilt weekly. + + +FrameioClient +=================== +.. automodule:: frameioclient.FrameioClient + :inherited-members: + + +Classes +=========== +.. toctree:: + classes/index + + +Modules +=========== +.. toctree:: + modules/index + + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/docs/installation.rst b/docs/installation.rst new file mode 100644 index 00000000..7ee3d525 --- /dev/null +++ b/docs/installation.rst @@ -0,0 +1,37 @@ +=============== +frameioclient +=============== + +.. toctree:: + :hidden: + + installation + +Installation +============ + +Stable releases of frameioclient can be installed with + +.. code-block:: sh + + pip <- or you may download a `.tgz` source + +archive from `pypi `_. +See the :doc:`installation` page for more detailed instructions. + +If you want to use the latest code, you can grab it from our +`Git repository `_, or `fork it `_. + +Usage +=================================== + +Authorization +------------- + +Frame.io Python SDK documentation: `Personal Access Tokens `_. + + +.. 
code-block:: python + + from frameioclient import FrameioClient + client = FrameioClient(token='my-token') diff --git a/docs/make.bat b/docs/make.bat new file mode 100644 index 00000000..2119f510 --- /dev/null +++ b/docs/make.bat @@ -0,0 +1,35 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=. +set BUILDDIR=_build + +if "%1" == "" goto help + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.http://sphinx-doc.org/ + exit /b 1 +) + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/docs/modules/downloader.rst b/docs/modules/downloader.rst new file mode 100644 index 00000000..2c213f92 --- /dev/null +++ b/docs/modules/downloader.rst @@ -0,0 +1,8 @@ +FrameioDownloader +=================== + +.. autoclass:: frameioclient.FrameioDownloader + :members: + :private-members: + :inherited-members: + :undoc-members: diff --git a/docs/modules/helpers.rst b/docs/modules/helpers.rst new file mode 100644 index 00000000..9f8c2b4e --- /dev/null +++ b/docs/modules/helpers.rst @@ -0,0 +1,9 @@ +FrameioHelpers +========================= + +.. autoclass:: frameioclient.FrameioHelpers + :members: + :private-members: + :inherited-members: + :undoc-members: + diff --git a/docs/modules/index.rst b/docs/modules/index.rst new file mode 100644 index 00000000..ed40d893 --- /dev/null +++ b/docs/modules/index.rst @@ -0,0 +1,8 @@ +Modules +===================== + +.. 
toctree:: + downloader + uploader + helpers + utils diff --git a/docs/modules/uploader.rst b/docs/modules/uploader.rst new file mode 100644 index 00000000..385dfe1c --- /dev/null +++ b/docs/modules/uploader.rst @@ -0,0 +1,8 @@ +FrameioUploader +=================== + +.. autoclass:: frameioclient.FrameioUploader + :members: + :private-members: + :inherited-members: + :undoc-members: diff --git a/docs/modules/utils.rst b/docs/modules/utils.rst new file mode 100644 index 00000000..cfc24284 --- /dev/null +++ b/docs/modules/utils.rst @@ -0,0 +1,8 @@ +Utils +=================== + +.. autoclass:: frameioclient.Utils + :members: + :private-members: + :inherited-members: + :undoc-members: diff --git a/docs/publish.py b/docs/publish.py new file mode 100644 index 00000000..4df3d17d --- /dev/null +++ b/docs/publish.py @@ -0,0 +1,190 @@ +import os +import hashlib +import frontmatter +import contentful_management + +TOKEN = os.getenv("CONTENTFUL_TOKEN") +SPACE_ID = os.getenv("CONTENTFUL_SPACE_ID") +SDK_ID = os.getenv("CONTENTFUL_SDK_ID") + +docs_path = "./dist/jekyll/api" + + +def transform_path(path): + # The paths generated automatically need modifying. + # This function should be localized to each SDK. 
+ + if path == '/api-frameioclient': + new_path = 'package' + else: + new_path = path.split('/api-frameioclient-')[1].lower() + + return new_path + + +def transform_title(docname): + if docname == 'api/frameioclient': + new_title = 'Frame.io Python SDK' + else: + print(docname) + new_title = docname.split('.')[1].title() + + return new_title + + +def load_local(directory): + # Load in the local docs + docs_data = list() + files = os.listdir(directory) + for fn in files: + fpath = os.path.join(directory, fn) + with open(fpath) as f: + post = frontmatter.load(f) + post['path'] = transform_path(post['path']) + post['title'] = transform_title(post['docname']) + docs_data.append(post) + + return docs_data + + +def load_remote(): + # Create the client + client = contentful_management.Client(TOKEN) + + # Grab all the autoDocs + autoDoc = client.content_types(SPACE_ID, 'master').find('autoDoc') + entries = autoDoc.entries().all() + + # Filter out the ones that aren't the right programming language + relevant_docs = list() + for entry in entries: + # entry = autoDoc.entries().find(entry.id) + entry.sys['locale'] = 'en-US' + sdk = entry.programming_language.id + if sdk == SDK_ID: + relevant_docs.append(entry) + + return relevant_docs + + +def hash_content(content): + # Returns an SHA-256 hash of the stringified content provided + hash_object = hashlib.sha256(bytes(content, 'utf-8')) + sha256 = hash_object.hexdigest() + return sha256 + + +def update_doc(): + pass + + +def publish_new_docs(docs, publish=False): + client = contentful_management.Client(TOKEN) + + for new_entry in docs: + entry_attributes = { + 'content_type_id': 'autoDoc', + 'fields': { + 'title': { + 'en-US': new_entry['title'] + }, + 'slug': { + 'en-US': new_entry['slug'] + }, + 'content': { + 'en-US': new_entry['content'] + }, + 'programmingLanguage': { + 'en-US': { + 'sys': { + "id": SDK_ID, + "type": "Link", + "linkType": "Entry" + } + } + } + } + } + + new_entry = client.entries(SPACE_ID, 'master').create( 
attributes=entry_attributes + ) + + # Only publish the new stuff if `publish=True` + if publish == True: + new_entry.publish() + + print(f"Submitted {entry_attributes['fields']['title']}") + + print("Done submitting") + + +def compare_docs(local, remote): + # Compare the remote docs and the local docs + + # Enrich local docs + enriched_local = dict() + for doc in local: + # print(doc.keys()) + enriched_local[hash_content(doc.content)] = { + "date": doc['date'], + "title": doc['title'], + "slug": doc['path'], + "content": doc.content, + "hash": hash_content(doc.content) + } + + # Enrich remote docs + enriched_remote = dict() + for doc in remote: + # print(doc.fields()) + enriched_remote[hash_content(doc.fields()['content'])] = { + "date": doc.sys['updated_at'], + "title": doc.fields()['title'], + "slug": doc.fields()['slug'], + "content": doc.fields()['content'], + "hash": hash_content(doc.fields()['content']) + } + + + # Compare titles and content hashes, update only ones in which the hashes are different + + # Declare our new list that we'll be appending to shortly + docs_to_update = list() + docs_to_maybe_publish = list() + docs_to_definitely_publish = list() + + # Iterate over keys + for doc_hash in enriched_local.keys(): + # If key found in remote keys, skip it + if doc_hash in enriched_remote.keys(): + print(f"Local and remote match for {enriched_remote[doc_hash]['title']}, skipping...") + continue + else: + docs_to_maybe_publish.append(enriched_local[doc_hash]) + + # return docs_to_update, docs_to_publish + return docs_to_maybe_publish + + +def main(): + # Grab the remote docs + remote_docs = load_remote() + + # Grab the local docs + local_docs = load_local(docs_path) + + # docs_to_update, docs_to_publish = compare_docs(local=local_docs, remote=remote_docs) + docs_to_publish = compare_docs(local=local_docs, remote=remote_docs) + + # Publish those docs! 
+ publish_new_docs(docs_to_publish) + + # Iterate over the new docs and if + # for doc in new_docs: + # # print(doc.content) + # print(doc.keys()) + + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/docs/requirements.txt b/docs/requirements.txt new file mode 100644 index 00000000..f5f3d0a0 --- /dev/null +++ b/docs/requirements.txt @@ -0,0 +1,12 @@ +sphinx +sphinx-jekyll-builder +sphinxcontrib-restbuilder +contentful_management +python-frontmatter +frameioclient +xxhash +furo +analytics-python +token-bucket +sphinx-autobuild +sphinx-autodoc-typehints \ No newline at end of file diff --git a/examples/asset_scraper.py b/examples/asset_scraper.py deleted file mode 100644 index cd37b732..00000000 --- a/examples/asset_scraper.py +++ /dev/null @@ -1,189 +0,0 @@ -################################### -# This scraper shows you how to gather assets from -# a Frame.io account and write to a CSV. -# Assets are gathered recursively from each -# team's projects. Folders, files and version stacks are written to the CSV. -# Note: Debug statements are left in the file and commented out. -################################### - - -import csv -import os -import time -from itertools import chain - -from frameioclient import FrameioClient - - -class ClientNotTokenized(Exception): - pass - - -class RootAssetIDNotFound(Exception): - pass - - -def get_teams_from_account(client): - """ - Builds a list of teams for the account. Note: the API offers two strategies to fetch an account's teams, - `'get_teams`` and `get_all_teams`. Using `get_teams` we'll pull only the teams owned by the account_id, - disregarding teams the user belongs to but does not own. 
More info: https://docs.frame.io/docs/directory-lists-and-file-trees#2-fetch-the-accounts-teams - """ - acct = client.get_me() - acct_id = acct['account_id'] - return client.get_teams(acct_id) - -def get_projects_from_team(client, team): - """Returns a list of projects for a team.""" - projects_in_team = [] - data = client.get_projects(team.get('id')) - team_name = team.get('name') - - for proj in data: - # Add project_name and team_name to the dict - proj['project_name'] = proj.get('name') - proj['team_name'] = team_name - # print('Debug: Found project: {}'.format(proj['project_name'])) - projects_in_team.append(proj) - # print('Debug: projects in team now {}'.format(len(projects_in_team))) - - return projects_in_team - -def get_projects_from_account(client): - """Gets projects from all teams in the account.""" - projects_in_account = [] - teams = get_teams_from_account(client) - - for team in teams: - team_name = team.get('name') - # print('Debug: === Found team: {} ==='.format(team_name)) - projects_in_team = (get_projects_from_team(client, team)) - projects_in_account.extend(projects_in_team) - # print('Debug: projects in account now: {}'.format(len(projects_in_account))) - - return projects_in_account - -def scrape_asset_data_from_projects(client, projects): - """ - Scrapes the asset data for an authenticated client and provided list of projects. - Returns a list of asset metadata for all assets contained in the project. 
- """ - assets_in_projects = [] - for project in projects: - assets_in_project = [] - proj_root_asset_id = project.get('root_asset_id') - assets_in_project = scrape_asset_data(client, proj_root_asset_id, assets_in_project) - assets_in_projects.extend(assets_in_project) - # print('Debug: total assets collected from projects: {}'.format(len(assets_in_projects))) - - for asset in assets_in_project: - # TODO: Repeats code from earlier and really shouldn't - asset['project_name'] = project.get('project_name') - asset['team_name'] = project.get('name') - - return assets_in_projects - - -def scrape_asset_data(client, asset_id, asset_list): - """ - Takes an initialized client and an asset_id representing a position in a directory tree. - Recursively builds a list of assets within the tree. Returns a list of dicts. - """ - assets = client.get_asset_children(asset_id) - - for asset in assets: - # Recurse through folders but skip the empty ones - if asset['type'] == "folder" and asset != []: - # Include non-empty folders in the list of scraped assets - asset_list.append(asset) - scrape_asset_data(client, asset['id'], asset_list) - - if asset['type'] == "file": - asset_list.append(asset) - - if asset['type'] == "version_stack": - # Read about version stacks: https://docs.frame.io/docs/managing-version-stacks - versions = client.get_asset_children(asset['id']) - asset_list.append(asset) - for v_asset in versions: - asset_list.append(v_asset) - - return asset_list - -def flatten_dict(d): - """ - Use this helper functon to flatten a dict holding API response data - and namespace the attributes. - """ - - def expand(key, val): - if isinstance(val, dict): - return [ (key + '.' 
+ k, v) for k, v in flatten_dict(val).items() ] - else: - return [ (key, val) ] - - items = [ item for k, v in d.items() for item in expand(k, v)] - - return dict(items) - -def write_assets_to_csv(asset_list, filename): - """ - Writes assets to assets.csv - Any attributes you add to the headers list will automatically be written to the CSV - The API returns many attributes so familiarize with the response data! - """ - headers = [ - 'id', - 'name', - 'type', - 'inserted_at', - 'item_count', - 'comment_count', - 'filesize', - 'shared', - 'private', - 'versions', - 'parent_id', - 'project_name', - 'project_id', - 'team_name', - 'creator.name', - 'creator.email', - ] - - # Flattening the assets dicts is not necessary but namespaces the CSV headers nicely. - flat_assets_list = [] - for a in asset_list: - flat_assets_list.append(flatten_dict(a)) - - with open('asset_record_for_account_id-{}'.format(filename), 'w') as f: - f_csv = csv.DictWriter(f, headers, extrasaction='ignore') - f_csv.writeheader() - f_csv.writerows(flat_assets_list) - - return - -if __name__ == '__main__': - - - TOKEN = os.getenv('FRAME_IO_TOKEN') - if os.environ.get('FRAME_IO_TOKEN') == None: - raise ClientNotTokenized('The Python SDK requires a valid developer token.') - ROOT_ASSET_ID = os.getenv('ROOT_ASSET_ID') - if os.environ.get('ROOT_ASSET_ID') == None: - raise RootAssetIDNotFound('If you don\'t know what Root Asset ID is, read this guide: https://docs.frame.io/docs/root-asset-ids') - - # Initialize the client library - client = FrameioClient(TOKEN) - - # Gather all assets in the account - projects = get_projects_from_account(client) - assets_in_account = scrape_asset_data_from_projects(client, projects) - - # Pass a filename to the .csv writer so we can explicitly ID the file - acct = client.get_me() - acct_id = acct['account_id'] - filename = 'assets_for_account_id-{}.csv'.format(acct_id) - - # Write the .csv - write_assets_to_csv(assets_in_account, filename) \ No newline at end of file diff 
--git a/examples/assets/asset_scraper.py b/examples/assets/asset_scraper.py new file mode 100644 index 00000000..082d8595 --- /dev/null +++ b/examples/assets/asset_scraper.py @@ -0,0 +1,217 @@ +################################### +# This scraper shows you how to gather assets from +# a Frame.io account and write to a CSV. +# Assets are gathered recursively from each +# team's projects. Folders, files and version stacks are written to the CSV. +# Note: Debug statements are left in the file and commented out. +################################### + + +import csv +from functools import lru_cache +import os +import time +from itertools import chain +from typing import Dict, List + +from frameioclient import FrameioClient + + +class ClientNotTokenized(Exception): + pass + + +class RootAssetIDNotFound(Exception): + pass + + +@lru_cache() +def get_teams_from_account(client: FrameioClient) -> Dict: + """ + Builds a list of teams for the account. Note: the API offers two strategies to fetch an account's teams, + `'get_teams`` and `get_all_teams`. Using `get_teams`, we'll pull only the teams owned by the account_id, + disregarding teams the user belongs to but does not own. 
def get_teams_from_account(client: "FrameioClient") -> Dict[str, str]:
    """
    Return a mapping of team_id -> team_name for the authenticated account.

    More info: https://docs.frame.io/docs/directory-lists-and-file-trees#2-fetch-the-accounts-teams
    """
    acct = client.users.get_me()
    acct_id = acct["account_id"]
    team_name_kv = dict()
    for team in client.teams.list(acct_id):
        team_name_kv[team["id"]] = team["name"]
    return team_name_kv


def get_projects_from_team(
    client: "FrameioClient", team_id: str, team_name: str
) -> List[Dict]:
    """Return the list of projects for one team, tagging each project dict
    with ``project_name`` and ``team_name`` for later CSV output."""
    projects_in_team = []
    data = client.teams.list_projects(team_id)

    for proj in data:
        # Add project_name and team_name so downstream code doesn't have to
        # re-resolve them from the API.
        proj["project_name"] = proj.get("name")
        proj["team_name"] = team_name
        print(f"Debug: Found project: {proj['project_name']}")
        projects_in_team.append(proj)
        print(f"Debug: projects in team now: {len(projects_in_team)}")

    return projects_in_team


def get_projects_from_account(client) -> List[Dict]:
    """Gather the projects from every team in the account."""
    projects_in_account = []
    teams = get_teams_from_account(client)

    for team_id, team_name in teams.items():
        print(f"Debug: === Found team: {team_name} ===")
        projects_in_account.extend(get_projects_from_team(client, team_id, team_name))
        print(f"Debug: projects in account now: {len(projects_in_account)}")

    return projects_in_account


def scrape_asset_data_from_projects(
    client: "FrameioClient", projects: List[Dict]
) -> List[Dict]:
    """
    Scrape asset metadata for every project in ``projects``.
    Returns a flat list of asset metadata dicts for all assets contained
    in all of the projects.
    """
    assets_in_projects = []
    # Fetch the team map ONCE here; without this, scrape_asset_data would
    # issue one extra API round-trip per asset just to resolve team names.
    team_lookup = get_teams_from_account(client)

    for project in projects:
        print(f"Debug: Scanning project: {project['name']} for assets")
        assets_in_project = scrape_asset_data(
            client,
            project.get("root_asset_id"),
            [],
            project["name"],
            team_lookup,
        )
        assets_in_projects.extend(assets_in_project)
        print(
            "Debug: total assets collected from projects: {}".format(
                len(assets_in_projects)
            )
        )

    return assets_in_projects


def scrape_asset_data(
    client: "FrameioClient",
    asset_id: str,
    asset_list: List[Dict],
    project_name: str,
    team_lookup: Dict = None,
) -> List[Dict]:
    """
    Recursively walk the directory tree below ``asset_id`` and append every
    asset (folders, files, and version-stack members) to ``asset_list``.

    :param team_lookup: optional ``{team_id: team_name}`` map; fetched from
        the API when not supplied. Passing it avoids one API call per asset.
    :returns: ``asset_list`` with every discovered asset appended.
    """
    if team_lookup is None:
        team_lookup = get_teams_from_account(client)

    assets = client.assets.get_children(asset_id)

    for asset in assets:
        if asset["type"] == "folder":
            # Record the folder itself, then recurse into its children.
            # (The original `asset != []` emptiness check was dropped: a dict
            # never equals [], so it was always true.)
            asset_list.append(asset)
            scrape_asset_data(
                client, asset["id"], asset_list, project_name, team_lookup
            )

        if asset["type"] == "file":
            asset_list.append(asset)

        if asset["type"] == "version_stack":
            # Read about version stacks: https://docs.frame.io/docs/managing-version-stacks
            versions = client.assets.get_children(asset["id"])
            asset_list.append(asset)
            for v_asset in versions:
                asset_list.append(v_asset)

        asset["project_name"] = project_name
        asset["team_name"] = team_lookup[asset["team_id"]]

    return asset_list


def flatten_dict(d) -> Dict:
    """
    Flatten a nested dict of API response data, namespacing nested keys
    with dots (e.g. ``{"creator": {"name": ...}}`` -> ``{"creator.name": ...}``).
    """

    def expand(key, val):
        if isinstance(val, dict):
            return [(key + "." + k, v) for k, v in flatten_dict(val).items()]
        return [(key, val)]

    return dict(item for k, v in d.items() for item in expand(k, v))


def write_assets_to_csv(asset_list: List[Dict], filename: str) -> None:
    """
    Write selected asset attributes to ``asset_record_for_account_id-<filename>.csv``.
    Any attributes you add to the headers list will automatically be written
    to the CSV. The API returns many attributes, so familiarize yourself with
    the response data!
    """
    headers = [
        "id",
        "name",
        "type",
        "inserted_at",
        "item_count",
        "comment_count",
        "filesize",
        "shared",
        "private",
        "versions",
        "parent_id",
        "project_name",
        "project_id",
        "team_name",
        "creator.name",
        "creator.email",
    ]

    # Flattening the asset dicts is not strictly necessary but namespaces
    # the CSV headers nicely (see flatten_dict).
    flat_assets_list = [flatten_dict(a) for a in asset_list]

    # BUG FIX: `filename` was previously ignored and a placeholder was baked
    # into the open() call. newline="" is required by the csv module so rows
    # aren't double-spaced on Windows.
    with open(f"asset_record_for_account_id-{filename}.csv", "w", newline="") as f:
        f_csv = csv.DictWriter(f, headers, extrasaction="ignore")
        f_csv.writeheader()
        f_csv.writerows(flat_assets_list)


if __name__ == "__main__":
    TOKEN = os.getenv("FRAME_IO_TOKEN")
    ROOT_ASSET_ID = os.getenv("ROOT_ASSET_ID")

    if TOKEN is None:
        raise ClientNotTokenized("The Python SDK requires a valid developer token.")

    if ROOT_ASSET_ID is None:
        raise RootAssetIDNotFound(
            "If you don't know what Root Asset ID is, read this guide: https://docs.frame.io/docs/root-asset-ids"
        )

    # Initialize the client library
    client = FrameioClient(TOKEN)

    # Gather all assets in the account
    projects = get_projects_from_account(client)
    assets_in_account = scrape_asset_data_from_projects(client, projects)

    # Name the CSV after the account id so the output file is self-identifying
    acct = client.users.get_me()
    acct_id = acct["account_id"]

    # Write the .csv
    write_assets_to_csv(assets_in_account, acct_id)
# --- examples/assets/asset_tree.py ------------------------------------------
import os

from time import time
from pprint import pprint


def demo_folder_tree(project_id, slim):
    """Time how long client.projects.tree() takes for one project and
    pretty-print the resulting tree."""
    # Lazy import so this example module can be loaded without the SDK installed.
    from frameioclient import FrameioClient

    TOKEN = os.getenv("FRAMEIO_TOKEN")
    client = FrameioClient(TOKEN)

    start_time = time()
    tree = client.projects.tree(project_id, slim)
    elapsed = round(time() - start_time, 2)

    item_count = len(tree)
    pprint(tree)

    print(f"Found {item_count} items")
    print(f"Took {elapsed} second to fetch the slim payload for project: {project_id}")
    print("\n")


if __name__ == "__main__":
    project_id = '2dfb6ce6-90d8-4994-881f-f02cd94b1c81'
    demo_folder_tree(project_id, slim=True)

# Observed benchmarks:
# 445 seconds for slim
# 509 seconds for non-slim


# --- examples/assets/recursive_upload.py ------------------------------------
import mimetypes
import concurrent.futures

# Module-level counters used for simple cross-thread progress reporting.
file_num = 0
file_count = 0


def create_n_upload(task):
    """Create a remote asset placeholder, then upload the local file into it.

    ``task`` is a ``(client, file_path, parent_asset_id)`` tuple so the whole
    job can be handed to ThreadPoolExecutor.submit as a single argument.
    Returns the uploaded asset info.
    """
    client, file_p, parent_asset_id = task
    abs_path = os.path.abspath(file_p)
    file_s = os.path.getsize(file_p)
    file_n = os.path.split(file_p)[1]
    file_mime = mimetypes.guess_type(abs_path)[0]

    asset = client.assets.create(
        parent_asset_id=parent_asset_id,
        name=file_n,
        type="file",
        filetype=file_mime,
        filesize=file_s,
    )

    with open(abs_path, "rb") as ul_file:
        asset_info = client.assets._upload(asset, ul_file)

    return asset_info


def create_folder(folder_n, parent_asset_id):
    """Create a remote folder under ``parent_asset_id`` and return its id.

    NOTE(review): relies on the module-level ``client`` created in __main__,
    so this helper only works when the file is run as a script — confirm
    before importing it elsewhere.
    """
    asset = client.assets.create(
        parent_asset_id=parent_asset_id,
        name=folder_n,
        type="folder",
    )
    return asset["id"]


def file_counter(root_folder):
    """Return the paths of all files under ``root_folder`` (recursive).

    Only the length of the result is used, for "Starting N/M" progress output.
    """
    matches = []
    for root, dirnames, filenames in os.walk(root_folder):
        for filename in filenames:
            # Join with the walk root so entries are real, usable paths
            # (the original single-arg os.path.join(filename) was a no-op).
            matches.append(os.path.join(root, filename))
    return matches


def recursive_upload(client, folder, parent_asset_id):
    """Upload the contents of local ``folder`` to the remote asset
    ``parent_asset_id``, recreating the folder structure remotely."""
    global file_num

    # Separate files and folders:
    file_list = list()
    folder_list = list()

    for item in os.listdir(folder):
        if item == ".DS_Store":  # Ignore .DS_Store files on Mac
            continue

        complete_item_path = os.path.join(folder, item)

        if os.path.isfile(complete_item_path):
            file_list.append(item)
        else:
            folder_list.append(item)

    with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
        futures = []
        for file_p in file_list:
            file_num += 1
            print(f"Starting {file_num}/{file_count}")
            complete_dir_obj = os.path.join(folder, file_p)
            task = (client, complete_dir_obj, parent_asset_id)
            futures.append(executor.submit(create_n_upload, task))

        # Surface per-file failures instead of silently discarding them
        # (submit() results were previously dropped). Best-effort: report
        # the error and keep going.
        for fut in concurrent.futures.as_completed(futures):
            exc = fut.exception()
            if exc is not None:
                print(f"Upload failed: {exc}")

    for folder_i in folder_list:
        new_folder = os.path.join(folder, folder_i)
        new_parent_asset_id = create_folder(folder_i, parent_asset_id)
        recursive_upload(client, new_folder, new_parent_asset_id)


if __name__ == "__main__":
    from frameioclient import FrameioClient

    root_folder = "./test_structure"
    parent_asset_id = "PARENT_ASSET_ID"
    client = FrameioClient(os.getenv("FRAME_IO_TOKEN"))

    file_count = len(file_counter(root_folder))
    recursive_upload(client, root_folder, parent_asset_id)
# --- examples/comments/range_based_comment.py -------------------------------
import os


def leave_range_based_comment(asset_id, comment):
    """Leave a range-based comment on ``asset_id``.

    A range-based comment marks a span starting at ``timestamp`` and lasting
    ``duration`` rather than a single frame.
    """
    # Lazy import so this example module can be loaded without the SDK installed.
    from frameioclient import FrameioClient

    client = FrameioClient(os.getenv("FRAME_IO_TOKEN"))
    res = client.comments.create(
        asset_id=asset_id,
        text=comment,  # BUG FIX: the `comment` argument was previously ignored
        timestamp=1911,
        duration=3.5,
    )

    print(res)


if __name__ == "__main__":
    leave_range_based_comment("id", "this is my comment!")


# --- examples/projects/download_project.py ----------------------------------
from time import time
from pprint import pprint


def get_folder_size(path="."):
    """Return the total size in bytes of all files under ``path``, recursively."""
    total = 0
    for entry in os.scandir(path):
        if entry.is_file():
            total += entry.stat().st_size
        elif entry.is_dir():
            total += get_folder_size(entry.path)
    return total


def demo_project_download(project_id):
    """Download an entire project and report elapsed time and bytes written."""
    from frameioclient import FrameioClient
    from frameioclient.lib.utils import FormatTypes, Utils

    TOKEN = os.getenv("FRAMEIO_TOKEN")
    client = FrameioClient(TOKEN)

    start_time = time()
    # NOTE(review): hard-coded local volume path — adjust for your machine.
    download_dir = '/Volumes/Jeff-EXT/Python Transfer Test'
    item_count = client.projects.download(project_id, destination_directory=download_dir)
    elapsed = round(time() - start_time, 2)

    folder_size = get_folder_size(download_dir)

    print(f"Found {item_count} items")
    print(f"Took {elapsed} second to download {Utils.format_value(folder_size, type=FormatTypes.SIZE)} for project: {client.projects.get(project_id)['name']}")
    print("\n")


if __name__ == "__main__":
    project_id = 'ba1791e8-bf1e-46cb-bcad-5e4bb6431a08'
    demo_project_download(project_id)

# Took 443.84 second to download 12.43 GB to USB HDD for project: HersheyPark Summer Campaign using Python SDK


# --- examples/projects/project_tree.py --------------------------------------
def demo_folder_tree(project_id):
    """Fetch and pretty-print the slim project tree via the helpers service."""
    from frameioclient import FrameioClient

    TOKEN = os.getenv("FRAMEIO_TOKEN")
    client = FrameioClient(TOKEN)

    start_time = time()
    tree = client.helpers.build_project_tree(project_id, slim=True)
    elapsed = round(time() - start_time, 2)

    item_count = len(tree)
    pprint(tree)

    print(f"Found {item_count} items")
    print(f"Took {elapsed} second to fetch the slim payload for project: {project_id}")
    print("\n")


if __name__ == "__main__":
    project_id = '2dfb6ce6-90d8-4994-881f-f02cd94b1c81'
    demo_folder_tree(project_id)
import FrameioClient -from .service import * -from .lib import * \ No newline at end of file diff --git a/frameioclient/client.py b/frameioclient/client.py index 7706706b..fefbbe40 100644 --- a/frameioclient/client.py +++ b/frameioclient/client.py @@ -1,130 +1,92 @@ -import re -import sys -import requests -from requests.adapters import HTTPAdapter -from requests.packages.urllib3.util.retry import Retry - -from .lib import ClientVersion, PaginatedResponse, Utils, ClientVersion, FrameioDownloader - - -class FrameioConnection(object): - def __init__(self, token, host='https://api.frame.io'): - self.token = token - self.host = host - self.retry_strategy = Retry( - total=3, - backoff_factor=1, - status_forcelist=[429], - method_whitelist=["POST", "OPTIONS", "GET"] - ) - self.client_version = ClientVersion.version() - self.headers = Utils.format_headers(self.token, self.client_version) - - self.adapter = HTTPAdapter(max_retries=self.retry_strategy) - self.session = requests.Session() - self.session.mount("https://", self.adapter) - - def _api_call(self, method, endpoint, payload={}, limit=None): - url = '{}/v2{}'.format(self.host, endpoint) - - r = self.session.request( - method, - url, - json=payload, - headers=self.headers, - ) - - if r.ok: - if r.headers.get('page-number'): - if int(r.headers.get('total-pages')) > 1: - return PaginatedResponse( - results=r.json(), - limit=limit, - page_size=r.headers['per-page'], - total_pages=r.headers['total-pages'], - total=r.headers['total'], - endpoint=endpoint, - method=method, - payload=payload, - client=self - ) - if isinstance(r.json(), list): - return r.json()[:limit] - return r.json() - - if r.status_code == 422 and "presentation" in endpoint: - raise PresentationException - - return r.raise_for_status() - - def get_specific_page(self, method, endpoint, payload, page): - """ - Gets a specific page for that endpoint, used by Pagination Class - - :Args: - method (string): 'get', 'post' - endpoint (string): endpoint 
('/accounts//teams') - payload (dict): Request payload - page (int): What page to get - """ - if method == 'get': - endpoint = '{}?page={}'.format(endpoint, page) - return self._api_call(method, endpoint) - - if method == 'post': - payload['page'] = page - return self._api_call(method, endpoint, payload=payload) - - -class FrameioClient(FrameioConnection): - @property - def _auth(self): - return self.token - - @property - def _version(self): - return ClientVersion.version() - - @property - def _download(self): - return FrameioDownloader - - @property - def users(self): - from .service import User - return User(self) - - @property - def assets(self): - from .service import Asset - return Asset(self) - - @property - def comments(self): - from .service import Comment - return Comment(self) - - @property - def logs(self): - from .service import AuditLogs - return AuditLogs(self) - - @property - def review_links(self): - from .service import ReviewLink - return ReviewLink(self) - - @property - def presentation_links(self): - from .service import PresentationLink - return PresentationLink(self) - - @property - def projects(self): - from .service import Project - return Project(self) - - @property - def teams(self): - from .service import Team - return Team(self) +""" +client.py +==================================== +The core module of the frameioclient +""" + +from .config import Config +from .lib import APIClient, ClientVersion, FrameioDownloader + +# from .lib import Telemetry +from .services import * + + +class FrameioClient(APIClient): + def __init__( + self, + token: str, + host: str = Config.api_host, + threads: int = Config.default_concurrency, + progress=False, + ): + super().__init__(token, host, threads, progress) + + @property + def me(self): + return self.users.get_me() + + # def telemetry(self): + # return Telemetry(self) + + def _auth(self): + return self.token + + def _version(self): + return ClientVersion.version() + + def _download(self): + return 
FrameioDownloader(self) + + @property + def users(self): + from .services import User + + return User(self) + + @property + def assets(self): + from .services import Asset + + return Asset(self) + + @property + def comments(self): + from .services import Comment + + return Comment(self) + + @property + def logs(self): + from .services import AuditLogs + + return AuditLogs(self) + + @property + def review_links(self): + from .services import ReviewLink + + return ReviewLink(self) + + @property + def presentation_links(self): + from .services import PresentationLink + + return PresentationLink(self) + + @property + def projects(self): + from .services import Project + + return Project(self) + + @property + def teams(self): + from .services import Team + + return Team(self) + + @property + def helpers(self): + from .services import FrameioHelpers + + return FrameioHelpers(self) diff --git a/frameioclient/config.py b/frameioclient/config.py new file mode 100644 index 00000000..8c733c8d --- /dev/null +++ b/frameioclient/config.py @@ -0,0 +1,4 @@ +class Config: + api_host = "https://api.frame.io" + default_page_size = 50 + default_concurrency = 5 diff --git a/frameioclient/fiocli.py b/frameioclient/fiocli.py new file mode 100644 index 00000000..13486ff5 --- /dev/null +++ b/frameioclient/fiocli.py @@ -0,0 +1,94 @@ +import json +import os +import sys +import argparse + +from frameioclient import FrameioClient + + +def main(): + parser = argparse.ArgumentParser( + prog="fiocli", description="Frame.io Python SDK CLI" + ) + + ## Define args + parser.add_argument( + "--token", + action="store", + metavar="token", + type=str, + nargs="+", + help="Developer Token", + ) + parser.add_argument( + "--target", + action="store", + metavar="target", + type=str, + nargs="+", + help="Target: remote project or folder, or alternatively a local file/folder", + ) + parser.add_argument( + "--destination", + action="store", + metavar="destination", + type=str, + nargs="+", + help="Destination: 
remote project or folder, or alternatively a local file/folder", + ) + parser.add_argument( + "--threads", + action="store", + metavar="threads", + type=int, + nargs="+", + help="Number of threads to use", + ) + + ## Parse args + args = parser.parse_args() + + if args.threads: + threads = args.threads[0] + else: + threads = 5 + + ## Handle args + if args.token: + client = None + # print(args.token) + try: + client = FrameioClient(args.token[0], progress=True, threads=threads) + except Exception as e: + print("Failed") + print(e) + sys.exit(1) + + # If args.op == 'upload': + if args.target: + if args.destination: + # Check to see if this is a local target and thus a download + if os.path.isdir(args.destination[0]): + try: + asset = client.assets.get(args.target[0]) + return client.assets.download( + asset, args.destination[0], progress=True, multi_part=True + ) + except Exception as e: + print(e) + client.projects.download(args.target[0], args.destination[0]) + + else: # This is an upload + if os.path.isdir(args.target[0]): + return client.assets.upload_folder( + args.target[0], args.destination[0] + ) + else: + try: + return json.dumps(client.assets.upload(args.destination[0], args.target[0])) + except Exception as e: + print(e) + else: + print("No destination supplied") + else: + print("No target supplied") diff --git a/frameioclient/lib/__init__.py b/frameioclient/lib/__init__.py index a29d8999..0ace68ac 100644 --- a/frameioclient/lib/__init__.py +++ b/frameioclient/lib/__init__.py @@ -1,5 +1,10 @@ -from .download import FrameioDownloader -from .upload import FrameioUploader -from .utils import Utils, PaginatedResponse, KB, MB +from .constants import * from .exceptions import * -from .version import ClientVersion \ No newline at end of file +from .logger import SDKLogger + +# from .telemetry import Telemetry +from .version import ClientVersion +from .upload import FrameioUploader +from .transport import APIClient +from .transfer import AWSClient, 
FrameioDownloader +from .utils import Utils, PaginatedResponse, KB, MB, ApiReference diff --git a/frameioclient/lib/constants.py b/frameioclient/lib/constants.py new file mode 100644 index 00000000..4b59e029 --- /dev/null +++ b/frameioclient/lib/constants.py @@ -0,0 +1,23 @@ +### Asset Fields ### + +asset_excludes = { + "only_fields": [ + # "a.downloads", + "a.name", + "a.filesize", + "u.name", + "a.is_session_watermarked", + "a.item_count", + "a.creator.name" "a.creator.id", + "a.inserted_at", + "a.original", + "a.upload_completed_at", + ], + "excluded_fields": ["a.checksums", "a.h264_1080_best", "a.source"], + "drop_includes": ["a.trancode_statuses", "a.transcodes", "a.source", "a.checksums"], + "hard_drop_fields": ["a.transcodes", "a.source"], +} + +default_thread_count = 5 + +retryable_statuses = [400, 429, 500, 503] diff --git a/frameioclient/lib/download.py b/frameioclient/lib/download.py index 9605d71a..a96dca46 100644 --- a/frameioclient/lib/download.py +++ b/frameioclient/lib/download.py @@ -1,189 +1,187 @@ -import io import os import math -import time -import requests -import threading -import concurrent.futures +from typing import Dict from .utils import Utils -from .exceptions import DownloadException, WatermarkIDDownloadException, AssetNotFullyUploaded -thread_local = threading.local() +from .logger import SDKLogger +from .transfer import AWSClient + +# from .telemetry import Event, ComparisonTest + +logger = SDKLogger("downloads") + +from .exceptions import ( + DownloadException, + WatermarkIDDownloadException, + AssetNotFullyUploaded, +) + class FrameioDownloader(object): - def __init__(self, asset, download_folder, prefix, multi_part=False, concurrency=5): - self.multi_part = multi_part - self.asset = asset - self.asset_type = None - self.download_folder = download_folder - self.resolution_map = dict() - self.destination = None - self.watermarked = asset['is_session_watermarked'] # Default is probably false - self.file_size = asset["filesize"] - 
self.concurrency = concurrency - self.futures = list() - self.chunk_size = (25 * 1024 * 1024) # 25 MB chunk size - self.chunks = math.ceil(self.file_size/self.chunk_size) - self.prefix = prefix - self.filename = Utils.normalize_filename(asset["name"]) - - self._evaluate_asset() - - def _evaluate_asset(self): - if self.asset.get("_type") != "file": - raise DownloadException(message="Unsupport Asset type: {}".format(self.asset.get("_type"))) - - if self.asset.get("upload_completed_at") == None: - raise AssetNotFullyUploaded - - def _get_session(self): - if not hasattr(thread_local, "session"): - thread_local.session = requests.Session() - return thread_local.session - - def _create_file_stub(self): - try: - fp = open(self.destination, "w") - # fp.write(b"\0" * self.file_size) # Disabled to prevent pre-allocatation of disk space - fp.close() - except FileExistsError as e: - print(e) - raise e - return True - - def get_download_key(self): - try: - url = self.asset['original'] - except KeyError as e: - if self.watermarked == True: - resolution_list = list() + def __init__( + self, + asset: Dict, + download_folder: str, + prefix: str, + multi_part: bool = False, + replace: bool = False, + ): + self.multi_part = multi_part + self.asset = asset + self.asset_type = None + self.download_folder = download_folder + self.replace = replace + self.resolution_map = dict() + self.destination = None + self.watermarked = asset["is_session_watermarked"] # Default is probably false + self.filesize = asset["filesize"] + self.futures = list() + self.checksum = None + self.original_checksum = None + self.checksum_verification = True + self.chunk_size = 25 * 1024 * 1024 # 25 MB chunk size + self.chunks = math.ceil(self.filesize / self.chunk_size) + self.prefix = prefix + self.bytes_started = 0 + self.bytes_completed = 0 + self.in_progress = 0 + self.aws_client = None + self.session = None + self.filename = Utils.normalize_filename(asset["name"]) + self.request_logs = list() + self.stats = 
True + + self._evaluate_asset() + self._get_path() + + def _update_in_progress(self): + self.in_progress = self.bytes_started - self.bytes_completed + return self.in_progress # Number of in-progress bytes + + def get_path(self): + if self.prefix != None: + self.filename = self.prefix + self.filename + + if self.destination == None: + final_destination = os.path.join(self.download_folder, self.filename) + self.destination = final_destination + + return self.destination + + def _evaluate_asset(self): + if self.asset.get("_type") != "file": + raise DownloadException( + message=f"Unsupport Asset type: {self.asset.get('_type')}" + ) + + # This logic may block uploads that were started before this field was introduced + if self.asset.get("upload_completed_at") == None: + raise AssetNotFullyUploaded + try: - for resolution_key, download_url in sorted(self.asset['downloads'].items()): - resolution = resolution_key.split("_")[1] # Grab the item at index 1 (resolution) - try: - resolution = int(resolution) - except ValueError: - continue - - if download_url is not None: - resolution_list.append(download_url) - - # Grab the highest resolution (first item) now - url = resolution_list[0] - except KeyError: - raise DownloadException - else: - raise WatermarkIDDownloadException - - return url - - def get_path(self): - if self.prefix != None: - self.filename = self.prefix + self.filename - - if self.destination == None: - final_destination = os.path.join(self.download_folder, self.filename) - self.destination = final_destination - - return self.destination - - def download_handler(self): - if os.path.isfile(self.get_path()): - print("File already exists at this location.") - return self.destination - else: - url = self.get_download_key() - - if self.watermarked == True: - return self.download(url) - else: - if self.multi_part == True: - return self.multi_part_download(url) + self.original_checksum = self.asset["checksums"]["xx_hash"] + except (TypeError, KeyError): + 
self.original_checksum = None + + def _create_file_stub(self): + try: + fp = open(self.destination, "w") + # fp.write(b"\0" * self.file_size) # Disabled to prevent pre-allocatation of disk space + fp.close() + except FileExistsError as e: + if self.replace == True: + os.remove(self.destination) # Remove the file + self._create_file_stub() # Create a new stub + else: + raise e + return True + + def _get_path(self): + logger.info(f"prefix: {self.prefix}") + if self.prefix != None: + self.filename = self.prefix + self.filename + + if self.destination == None: + final_destination = os.path.join(self.download_folder, self.filename) + self.destination = final_destination + + return self.destination + + def _get_checksum(self): + try: + self.original_checksum = self.asset["checksums"]["xx_hash"] + except (TypeError, KeyError): + self.original_checksum = None + + return self.original_checksum + + def get_download_key(self): + try: + url = self.asset["original"] + except KeyError as e: + if self.watermarked == True: + resolution_list = list() + try: + for resolution_key, download_url in sorted( + self.asset["downloads"].items() + ): + resolution = resolution_key.split("_")[ + 1 + ] # Grab the item at index 1 (resolution) + try: + resolution = int(resolution) + except ValueError: + continue + + if download_url is not None: + resolution_list.append(download_url) + + # Grab the highest resolution (first item) now + url = resolution_list[0] + except KeyError: + raise DownloadException + else: + raise WatermarkIDDownloadException + + return url + + def download(self): + """Call this to perform the actual download of your asset!""" + + # Check folders + if os.path.isdir(os.path.join(os.path.curdir, self.download_folder)): + logger.info("Folder exists, don't need to create it") + else: + logger.info("Destination folder not found, creating") + os.mkdir(self.download_folder) + + # Check files + if os.path.isfile(self.get_path()) == False: + pass + + if 
os.path.isfile(self.get_path()) and self.replace == True: + os.remove(self.get_path()) + + if os.path.isfile(self.get_path()) and self.replace == False: + logger.info("File already exists at this location.") + return self.destination + + # Get URL + url = self.get_download_key() + + # AWS Client + self.aws_client = AWSClient(downloader=self, concurrency=5) + + # Handle watermarking + if self.watermarked == True: + return self.aws_client._download_whole(url) + else: - return self.download(url) - - def download(self, url): - start_time = time.time() - print("Beginning download -- {} -- {}".format(self.asset["name"], Utils.format_bytes(self.file_size, type="size"))) - - # Downloading - r = requests.get(url) - open(self.destination, "wb").write(r.content) - - download_time = time.time() - start_time - download_speed = Utils.format_bytes(math.ceil(self.file_size/(download_time))) - print("Downloaded {} at {}".format(Utils.format_bytes(self.file_size, type="size"), download_speed)) - - return self.destination, download_speed - - def multi_part_download(self, url): - start_time = time.time() - - # Generate stub - try: - self._create_file_stub() - - except Exception as e: - raise DownloadException(message=e) - - offset = math.ceil(self.file_size / self.chunks) - in_byte = 0 # Set initially here, but then override - - print("Multi-part download -- {} -- {}".format(self.asset["name"], Utils.format_bytes(self.file_size, type="size"))) - - # Queue up threads - with concurrent.futures.ThreadPoolExecutor(max_workers=self.concurrency) as executor: - for i in range(int(self.chunks)): - out_byte = offset * (i+1) # Increment by the iterable + 1 so we don't mutiply by zero - task = (url, in_byte, out_byte, i) - - time.sleep(0.1) # Stagger start for each chunk by 0.1 seconds - self.futures.append(executor.submit(self.download_chunk, task)) - in_byte = out_byte # Reset new in byte equal to last out byte - - # Wait on threads to finish - for future in 
concurrent.futures.as_completed(self.futures): - try: - status = future.result() - print(status) - except Exception as exc: - print(exc) - - # Calculate and print stats - download_time = time.time() - start_time - download_speed = Utils.format_bytes(math.ceil(self.file_size/(download_time))) - print("Downloaded {} at {}".format(Utils.format_bytes(self.file_size, type="size"), download_speed)) - - return self.destination - - def download_chunk(self, task): - # Download a particular chunk - # Called by the threadpool executor - - url = task[0] - start_byte = task[1] - end_byte = task[2] - chunk_number = task[3] - - session = self._get_session() - print("Getting chunk {}/{}".format(chunk_number + 1, self.chunks)) - - # Specify the starting and ending of the file - headers = {"Range": "bytes=%d-%d" % (start_byte, end_byte)} - - # Grab the data as a stream - r = session.get(url, headers=headers, stream=True) - - with open(self.destination, "r+b") as fp: - fp.seek(start_byte) # Seek to the right of the file - fp.write(r.content) # Write the data - print("Done writing chunk {}/{}".format(chunk_number + 1, self.chunks)) - - return "Complete!" 
- - @staticmethod - def get_byte_range(url, start_byte=0, end_byte=2048): - headers = {"Range": "bytes=%d-%d" % (start_byte, end_byte)} - br = requests.get(url, headers=headers).content - return br \ No newline at end of file + # Don't use multi-part download for files below 25 MB + if self.asset["filesize"] < 26214400: + return self.aws_client._download_whole(url) + if self.multi_part == True: + return self.aws_client.multi_thread_download(url) + else: + return self.aws_client._download_whole(url) diff --git a/frameioclient/lib/exceptions.py b/frameioclient/lib/exceptions.py index 8710296a..d9b06914 100644 --- a/frameioclient/lib/exceptions.py +++ b/frameioclient/lib/exceptions.py @@ -4,39 +4,61 @@ class PresentationException(Exception): """ def __init__( - self, - message="Your asset already has a presentation link associated with it." + self, message="Your asset already has a presentation link associated with it." ): self.message = message super().__init__(self.message) + class WatermarkIDDownloadException(Exception): """Exception raised when trying to download a file where there is no available download URL. """ + def __init__( - self, - message="This file is unavailable for download due to security and permission settings." + self, + message="This file is unavailable for download due to security and permission settings.", ): self.message = message super().__init__(self.message) + class DownloadException(Exception): - """Exception raised when trying to download a file - """ + """Exception raised when trying to download a file""" + + def __init__(self, message="Generic Dowload exception."): + self.message = message + super().__init__(self.message) + + +class AssetNotFullyUploaded(Exception): + """Exception raised when trying to download a file that isn't yet fully upload.""" + def __init__( - self, - message="Generic Dowload exception." + self, message="Unable to download this asset because it not yet fully uploaded." 
): self.message = message super().__init__(self.message) -class AssetNotFullyUploaded(Exception): - """Exception raised when trying to download a file that isn't yet fully upload. - """ + +class AssetChecksumNotPresent(Exception): + """Exception raised when there's no checksum present for the Frame.io asset.""" + + def __init__( + self, + message="""No checksum found on Frame.io for this asset. This could be because it was uploaded \ + before we introduced the feature, the media pipeline failed to process the asset, or the asset has yet to finish being processed.""", + ): + self.message = message + super().__init__(self.message) + + +class AssetChecksumMismatch(Exception): + """Exception raised when the checksum for the downloaded file doesn't match what's found on Frame.io.""" + def __init__( - self, - message="Unable to download this asset because it not yet fully uploaded." + self, + message="Checksum mismatch, you should re-download the asset to resolve any corrupt bits.", ): self.message = message super().__init__(self.message) diff --git a/frameioclient/lib/logger.py b/frameioclient/lib/logger.py new file mode 100644 index 00000000..7b368545 --- /dev/null +++ b/frameioclient/lib/logger.py @@ -0,0 +1,13 @@ +import logging + + +class SDKLogger: + def __init__(self, log_name): + self.initialize_logger() + self.logger = logging.getLogger(log_name) + + def initialize_logger(self): + logging.basicConfig(level=logging.INFO) + + def info(self, message): + self.logger.info(message) diff --git a/frameioclient/lib/service.py b/frameioclient/lib/service.py new file mode 100644 index 00000000..cf877bf3 --- /dev/null +++ b/frameioclient/lib/service.py @@ -0,0 +1,18 @@ +from ..client import FrameioClient + +class Service(object): + def __init__(self, client: FrameioClient): + self.client = client + self.concurrency = 10 + + # Run auto-configure afterwards + self.autoconfigure() + + def autoconfigure(self): + pass + + def save_config(self): + pass + + def load_config(self): 
+ pass diff --git a/frameioclient/lib/telemetry.py b/frameioclient/lib/telemetry.py new file mode 100644 index 00000000..211e19da --- /dev/null +++ b/frameioclient/lib/telemetry.py @@ -0,0 +1,91 @@ +import os +import analytics + +from pprint import pprint + +from .logger import SDKLogger +from .version import ClientVersion + +segment_id = os.getenv("SEGMENT_WRITE_KEY", "") # Production +analytics.write_key = segment_id + + +class Telemetry(object): + def __init__(self, user_id): + self.user_id = user_id + self.identity = None + self.context = None + self.integrations = {"all": False, "Amplitude": True} + self.logger = SDKLogger("telemetry") + + self.build_context() + + def build_context(self): + return { + "app": { + "name": "python-frameoclient", + "version": ClientVersion.version(), + } + } + + def push(self, event_name, properties): + self.logger.info((f"Pushing '{event_name}' event to segment", properties)) + + try: + status = analytics.track( + self.user_id, + event_name, + properties=properties, + context=self.build_context(), + integrations=self.integrations, + ) + except Exception as e: + self.logger.info(e, event_name, properties) + + +class Event(Telemetry, object): + def __init__(self, user_id, event_name, properties): + super().__init__(user_id) + self.push(event_name, properties) + + +class ComparisonTest(Event, object): + def __init__(self, transfer_stats, request_logs=[]): + super().__init__() + # self.event_name = event_name + self.transfer_stats = None + # self.requests_logs = requests_logs + + @staticmethod + def _parse_requests_data(req_object): + return { + "speed": 0, + "time_to_first_byte": 0, + "response_time": 0, + "byte_transferred": 0, + "http_status": 200, + "request_type": "GET", + } + + def _build_transfer_stats_payload(self, event_data): + # Turn the request payload into a useful shape + properties = { + "download_speed": 0, + "control": {"upload_bytes_sec": 0, "download_bits_sec": 0, "ping_ms": 0}, + "hash_speed": 0, + } + + return 
properties + + def track_transfer(self): + for chunk in self.requests_logs: + pprint(chunk) + # self.logger.info(pprint(chunk)) + + # Collect info to build message + + # Build payload for transfer tracking + # stats_payload = self._build_transfer_stats_payload() + + # Push the payload for tracking the transfer + # self.push('python_transfer_stats', stats_payload) diff --git a/frameioclient/lib/transfer.py b/frameioclient/lib/transfer.py new file mode 100644 index 00000000..125e4403 --- /dev/null +++ b/frameioclient/lib/transfer.py @@ -0,0 +1,479 @@ +import concurrent.futures +import math +import os +import time +from pprint import pprint +from random import randint +from typing import Dict, List, Optional + +import requests + +from .exceptions import ( + AssetChecksumMismatch, + AssetChecksumNotPresent, + DownloadException, +) +from .logger import SDKLogger +from .utils import FormatTypes, Utils + +logger = SDKLogger("downloads") + +from .exceptions import ( + AssetNotFullyUploaded, + DownloadException, + WatermarkIDDownloadException, +) +from .transport import HTTPClient + + +class FrameioDownloader(object): + def __init__( + self, + asset: Dict, + download_folder: str, + prefix: str, + multi_part: bool = False, + replace: bool = False, + ): + self.multi_part = multi_part + self.asset = asset + self.asset_type = None + self.download_folder = download_folder + self.replace = replace + self.resolution_map = dict() + self.destination = None + self.watermarked = asset["is_session_watermarked"] # Default is probably false + self.filesize = asset["filesize"] + self.futures = list() + self.checksum = None + self.original_checksum = None + self.checksum_verification = True + self.chunk_size = 25 * 1024 * 1024 # 25 MB chunk size + self.chunks = math.ceil(self.filesize / self.chunk_size) + self.prefix = prefix + self.bytes_started = 0 + self.bytes_completed = 0 + self.in_progress = 0 + self.aws_client = None + self.session = None + self.filename = 
Utils.normalize_filename(asset["name"]) + self.request_logs = list() + self.stats = False + + self._evaluate_asset() + self._get_path() + + def get_path(self): + if self.prefix != None: + self.filename = self.prefix + self.filename + + if self.destination == None: + final_destination = os.path.join(self.download_folder, self.filename) + self.destination = final_destination + + return self.destination + + def _evaluate_asset(self): + if self.asset.get("_type") != "file": + raise DownloadException( + message=f"Unsupport Asset type: {self.asset.get('_type')}" + ) + + # This logic may block uploads that were started before this field was introduced + if self.asset.get("upload_completed_at") == None: + raise AssetNotFullyUploaded + + try: + self.original_checksum = self.asset["checksums"]["xx_hash"] + except (TypeError, KeyError): + self.original_checksum = None + + def _create_file_stub(self): + try: + fp = open(self.destination, "w") + # fp.write(b"\0" * self.filesize) # Disabled to prevent pre-allocatation of disk space + fp.close() + except FileExistsError as e: + if self.replace == True: + os.remove(self.destination) # Remove the file + self._create_file_stub() # Create a new stub + else: + raise e + return True + + def _get_path(self): + logger.info("prefix: {}".format(self.prefix)) + if self.prefix != None: + self.filename = self.prefix + self.filename + + if self.destination == None: + final_destination = os.path.join(self.download_folder, self.filename) + self.destination = final_destination + + return self.destination + + def _get_checksum(self): + try: + self.original_checksum = self.asset["checksums"]["xx_hash"] + except (TypeError, KeyError): + self.original_checksum = None + + return self.original_checksum + + def get_download_key(self): + try: + url = self.asset["original"] + except KeyError as e: + if self.watermarked == True: + resolution_list = list() + try: + for resolution_key, download_url in sorted( + self.asset["downloads"].items() + ): + 
resolution = resolution_key.split("_")[ + 1 + ] # Grab the item at index 1 (resolution) + try: + resolution = int(resolution) + except ValueError: + continue + + if download_url is not None: + resolution_list.append(download_url) + + # Grab the highest resolution (first item) now + url = resolution_list[0] + except KeyError: + raise DownloadException + else: + raise WatermarkIDDownloadException + + return url + + def download(self): + """Call this to perform the actual download of your asset!""" + + # Check folders + if os.path.isdir(os.path.join(os.path.curdir, self.download_folder)): + logger.info("Folder exists, don't need to create it") + else: + logger.info("Destination folder not found, creating") + os.mkdir(self.download_folder) + + # Check files + if os.path.isfile(self.get_path()) == False: + pass + + if os.path.isfile(self.get_path()) and self.replace == True: + os.remove(self.get_path()) + + if os.path.isfile(self.get_path()) and self.replace == False: + logger.info("File already exists at this location.") + return self.destination + + # Get URL + url = self.get_download_key() + + # AWS Client + self.aws_client = AWSClient(downloader=self, concurrency=5) + + # Handle watermarking + if self.watermarked == True: + return self.aws_client._download_whole(url) + + else: + # Don't use multi-part download for files below 25 MB + if self.asset["filesize"] < 26214400: + return self.aws_client._download_whole(url) + if self.multi_part == True: + return self.aws_client.multi_thread_download(url) + else: + return self.aws_client._download_whole(url) + + +class AWSClient(HTTPClient, object): + def __init__(self, downloader: FrameioDownloader, concurrency=None, progress=True): + super().__init__(self) # Initialize via inheritance + self.progress = progress + self.progress_manager = None + self.destination = downloader.destination + self.bytes_started = 0 + self.bytes_completed = 0 + self.downloader = downloader + self.futures = [] + self.original = 
self.downloader.asset["original"] + + # Ensure this is a valid number before assigning + if concurrency is not None and type(concurrency) == int and concurrency > 0: + self.concurrency = concurrency + # else: + # self.concurrency = self._optimize_concurrency() + + @staticmethod + def check_cdn(url): + # TODO improve this algo + if "assets.frame.io" in url: + return "Cloudfront" + elif "s3" in url: + return "S3" + else: + return None + + def _create_file_stub(self): + try: + fp = open(self.downloader.destination, "w") + # fp.write(b"\0" * self.filesize) # Disabled to prevent pre-allocatation of disk space + fp.close() + except FileExistsError as e: + if self.replace == True: + os.remove(self.downloader.destination) # Remove the file + self._create_file_stub() # Create a new stub + else: + print(e) + raise e + except TypeError as e: + print(e) + raise e + return True + + def _optimize_concurrency(self): + """ + This method looks as the net_stats and disk_stats that we've run on \ + the current environment in order to suggest the best optimized \ + number of concurrent TCP connections. + + Example:: + AWSClient._optimize_concurrency() + """ + + return 5 + + def _get_byte_range( + self, url: str, start_byte: Optional[int] = 0, end_byte: Optional[int] = 2048 + ): + """ + Get a specific byte range from a given URL. This is **not** optimized \ + for heavily-threaded operations currently. 
+ + :Args: + url (string): The URL you want to fetch a byte-range from + start_byte (int): The first byte you want to request + end_byte (int): The last byte you want to extract + + Example:: + AWSClient().get_byte_range(asset, "~./Downloads") + """ + + range_header = {"Range": "bytes=%d-%d" % (start_byte, end_byte)} + + headers = {**self.shared_headers, **range_header} + + br = requests.get(url, headers=headers).content + return br + + def _download_whole(self, url: str): + start_time = time.time() + print( + "Beginning download -- {} -- {}".format( + self.asset["name"], + Utils.format_value(self.downloader.filesize, type=FormatTypes.SIZE), + ) + ) + + # Downloading + self.session = self._get_session() + r = self.session.get(url, stream=True) + + # Downloading + with open(self.downloader.destination, "wb") as handle: + try: + # TODO make sure this approach works for SBWM download + for chunk in r.iter_content(chunk_size=4096): + if chunk: + handle.write(chunk) + except requests.exceptions.ChunkedEncodingError as e: + raise e + + download_time = time.time() - start_time + download_speed = Utils.format_value( + math.ceil(self.downloader.filesize / (download_time)) + ) + print( + f"Downloaded {Utils.format_value(self.downloader.filesize, type=FormatTypes.SIZE)} at {Utils.format_value(download_speed, type=FormatTypes.SPEED)}" + ) + + return self.destination, download_speed + + def _download_chunk(self, task: List): + # Download a particular chunk + # Called by the threadpool executor + + # Destructure the task object into its parts + url = task[0] + start_byte = task[1] + end_byte = task[2] + chunk_number = task[3] + # in_progress = task[4] + + # Set the initial chunk_size, but prepare to overwrite + chunk_size = end_byte - start_byte + + if self.bytes_started + (chunk_size) > self.downloader.filesize: + difference = abs( + self.downloader.filesize - (self.bytes_started + chunk_size) + ) # should be negative + chunk_size = chunk_size - difference + print(f"Chunk size 
as done via math: {chunk_size}") + else: + pass + + # Set chunk size in a smarter way + self.bytes_started += chunk_size + + # Specify the start and end of the range request + headers = {"Range": "bytes=%d-%d" % (start_byte, end_byte)} + + # Grab the data as a stream + self.session = self._get_session() + r = self.session.get(url, headers=headers, stream=True) + + # Write the file to disk + with open(self.destination, "r+b") as fp: + fp.seek(start_byte) # Seek to the right spot in the file + chunk_size = len(r.content) # Get the final chunk size + fp.write(r.content) # Write the data + + # Save requests logs + self.downloader.request_logs.append( + { + "headers": r.headers, + "http_status": r.status_code, + "bytes_transferred": len(r.content), + } + ) + + # Increase the count for bytes_completed, but only if it doesn't overrun file length + self.bytes_completed += chunk_size + if self.bytes_completed > self.downloader.filesize: + self.bytes_completed = self.downloader.filesize + + # After the function completes, we report back the # of bytes transferred + return chunk_size + + def multi_thread_download(self): + start_time = time.time() + + # Generate stub + try: + self._create_file_stub() + except Exception as e: + raise DownloadException(message=e) + + pprint(self.downloader) + + offset = math.ceil(self.downloader.filesize / self.downloader.chunks) + in_byte = 0 # Set initially here, but then override + + print( + f"Multi-part download -- {self.downloader.asset['name']} -- {Utils.format_value(self.downloader.filesize, type=FormatTypes.SIZE)}" + ) + + with concurrent.futures.ThreadPoolExecutor( + max_workers=self.concurrency + ) as executor: + for i in range(int(self.downloader.chunks)): + # Increment by the iterable + 1 so we don't mutiply by zero + out_byte = offset * (i + 1) + + # Create task tuple + task = (self.downloader.asset["original"], in_byte, out_byte, i) + + # Stagger start for each chunk by 0.1 seconds + if i < self.concurrency: + 
time.sleep(randint(1, 5) / 10) + + # Append tasks to futures list + self.futures.append(executor.submit(self._download_chunk, task)) + + # Reset new in byte equal to last out byte + in_byte = out_byte + + # Wait on threads to finish + for future in concurrent.futures.as_completed(self.futures): + try: + chunk_size = future.result() + print(chunk_size) + except Exception as exc: + print(exc) + + # Calculate and print stats + download_time = round((time.time() - start_time), 2) + pprint(self.downloader) + download_speed = round((self.downloader.filesize / download_time), 2) + + # TODO: Ensure this works correctly on assets that are missing checksums/at all + # if self.downloader.checksum_verification == True: + # # Check for checksum, if not present throw error + # if self.downloader._get_checksum() == None: + # raise AssetChecksumNotPresent + + # # Calculate the file hash + # if ( + # Utils.calculate_hash(self.destination) + # != self.downloader.original_checksum + # ): + # raise AssetChecksumMismatch + + # Log completion event + SDKLogger("downloads").info( + f"Downloaded {Utils.format_value(self.downloader.filesize, type=FormatTypes.SIZE)} at {Utils.format_value(download_speed, type=FormatTypes.SPEED)}" + ) + + # Submit telemetry + transfer_stats = { + "speed": download_speed, + "time": download_time, + "cdn": AWSClient.check_cdn(self.original), + } + + # Event(self.user_id, 'python-sdk-download-stats', transfer_stats) + + # If stats = True, we return a dict with way more info, otherwise \ + if self.downloader.stats: + # We end by returning a dict with info about the download + dl_info = { + "destination": self.destination, + "speed": download_speed, + "elapsed": download_time, + "cdn": AWSClient.check_cdn(self.original), + "concurrency": self.concurrency, + "size": self.downloader.filesize, + "chunks": self.downloader.chunks, + } + return dl_info + else: + return self.destination + + +class TransferJob(AWSClient): + # These will be used to track the job and then 
push telemetry + def __init__(self, job_info): + self.job_info = job_info # < - convert to JobInfo class + self.cdn = "S3" # or 'CF' - use check_cdn to confirm + self.progress_manager = None + + +class DownloadJob(TransferJob): + def __init__(self): + self.asset_type = "review_link" # we should use a dataclass here + # Need to create a re-usable job schema + # Think URL -> output_path + pass + + +class UploadJob(TransferJob): + def __init__(self, destination): + self.destination = destination + # Need to create a re-usable job schema + # Think local_file path and remote Frame.io destination + pass diff --git a/frameioclient/lib/transport.py b/frameioclient/lib/transport.py new file mode 100644 index 00000000..09ac0ce4 --- /dev/null +++ b/frameioclient/lib/transport.py @@ -0,0 +1,194 @@ +import concurrent.futures +import threading +import time +from typing import Dict, Optional + +import requests +from requests.adapters import HTTPAdapter +from token_bucket import Limiter, MemoryStorage +from urllib3.util.retry import Retry + +from .constants import default_thread_count, retryable_statuses +from .exceptions import PresentationException +from .utils import PaginatedResponse +from .version import ClientVersion + + +class HTTPMethods: + GET = "get" + POST = "post" + PUT = "put" + DELETE = "delete" + PATCH = "patch" + HEAD = "head" + + +class HTTPClient(object): + """HTTP Client base that automatically handles the following: + - Shared thread/session object + - Client version headers + - Automated retries + + """ + + def __init__(self, threads: Optional[int] = default_thread_count): + """ + :param threads: Number of threads to use concurrently. 
+ """ + + # Setup number of threads to use + self.threads = threads + + # Initialize empty thread object + self.thread_local = None + self.client_version = ClientVersion.version() + self.shared_headers = {"x-frameio-client": f"python/{self.client_version}"} + + # Configure retry strategy (very broad right now) + self.retry_strategy = Retry( + total=100, + backoff_factor=2, + status_forcelist=retryable_statuses, + method_whitelist=["GET", "POST", "PUT", "GET", "DELETE"], + ) + + # Create real thread + self._initialize_thread() + + def _initialize_thread(self): + self.thread_local = threading.local() + + def _get_session(self): + # Create session only if needed + if not hasattr(self.thread_local, "session"): + http = requests.Session() + adapter = HTTPAdapter(max_retries=self.retry_strategy) + adapter.add_headers(self.shared_headers) # add version header + http.mount("https://", adapter) + http.mount("http://", adapter) + self.thread_local.session = http + + # Return session + return self.thread_local.session + + +class APIClient(HTTPClient, object): + """Frame.io API Client that handles automatic pagination, and lots of other nice things. + + Args: + HTTPClient (class): HTTP Client base class + token (str): Frame.io developer token, JWT, or OAuth access token. + threads (int): Number of threads to concurrently use for uploads/downloads. + progress (bool): If True, show status bars in console. 
+ """ + + def __init__(self, token: str, host: str, threads: int, progress: bool): + super().__init__(threads) + self.host = host + self.token = token + self.threads = threads + self.progress = progress + self._initialize_thread() + self.session = self._get_session() + self.auth_header = {"Authorization": f"Bearer {self.token}"} + + def _format_api_call(self, endpoint: str): + return f"{self.host}/v2{endpoint}" + + def _api_call( + self, method, endpoint: str, payload: Dict = {}, limit: Optional[int] = None + ): + headers = {**self.shared_headers, **self.auth_header} + + r = self.session.request( + method, self._format_api_call(endpoint), headers=headers, json=payload + ) + + if r.ok: + if r.headers.get("page-number"): + if int(r.headers.get("total-pages")) > 1: + return PaginatedResponse( + results=r.json(), + limit=limit, + page_size=r.headers["per-page"], + total_pages=r.headers["total-pages"], + total=r.headers["total"], + endpoint=endpoint, + method=method, + payload=payload, + client=self, + ) + + if isinstance(r.json(), list): + return r.json()[:limit] + + return r.json() + + if r.status_code == 422 and "presentation" in endpoint: + raise PresentationException + + if r.status_code == 500 and 'audit' in endpoint: + print(f"Hit a 500 on page: {r.headers.get('page-number')}, url: {r.url}") + return [] + + return r.raise_for_status() + + def get_specific_page( + self, method: HTTPMethods, endpoint: str, payload: Dict, page: int + ): + """ + Gets a specific page for that endpoint, used by Pagination Class + + :Args: + method (string): 'get', 'post' + endpoint (string): endpoint ('/accounts//teams') + payload (dict): Request payload + page (int): What page to get + """ + if method == HTTPMethods.GET: + endpoint = "{endpoint}?page={page}" + return self._api_call(method, endpoint) + + if method == HTTPMethods.POST: + payload["page"] = page + return self._api_call(method, endpoint, payload=payload) + + def exec_stream(callable, iterable, sync=lambda _: False, 
capacity=10, rate=10): + """ + Executes a stream according to a defined rate limit. + """ + limiter = Limiter(capacity, rate, MemoryStorage()) + futures = set() + + def execute(operation): + return (operation, callable(operation)) + + with concurrent.futures.ThreadPoolExecutor(max_workers=capacity) as executor: + while True: + if not limiter.consume("stream", 1): + start = int(time.time()) + done, pending = concurrent.futures.wait( + futures, return_when=concurrent.futures.FIRST_COMPLETED + ) + for future in done: + yield future.result() + + futures = pending + if (int(time.time()) - start) < 1: + time.sleep( + 1.0 / rate + ) # guarantee there's capacity in the rate limit at end of the loop + + operation = next(iterable, None) + + if not operation: + done, _ = concurrent.futures.wait(futures) + for future in done: + yield future.result() + break + + if sync(operation): + yield execute(operation) + continue + + futures.add(executor.submit(execute, operation)) diff --git a/frameioclient/lib/upload.py b/frameioclient/lib/upload.py index 448dae1b..d8babb6f 100644 --- a/frameioclient/lib/upload.py +++ b/frameioclient/lib/upload.py @@ -1,78 +1,161 @@ -import os +import concurrent.futures import math -import requests +import os import threading -import concurrent.futures +from typing import List + +import requests + +from .utils import FormatTypes, Utils thread_local = threading.local() + class FrameioUploader(object): - def __init__(self, asset, file): - self.asset = asset - self.file = file - self.chunk_size = None - - def _calculate_chunks(self, total_size, chunk_count): - self.chunk_size = int(math.ceil(total_size / chunk_count)) - - chunk_offsets = list() - - for index in range(chunk_count): - offset_amount = index * self.chunk_size - chunk_offsets.append(offset_amount) - - return chunk_offsets - - def _get_session(self): - if not hasattr(thread_local, "session"): - thread_local.session = requests.Session() - return thread_local.session - - def 
_smart_read_chunk(self, chunk_offset, is_final_chunk): - with open(os.path.realpath(self.file.name), "rb") as file: - file.seek(chunk_offset, 0) - if is_final_chunk: # If it's the final chunk, we want to just read until the end of the file - data = file.read() - else: # If it's not the final chunk, we want to ONLY read the specified chunk - data = file.read(self.chunk_size) - return data - - def _upload_chunk(self, task): - url = task[0] - chunk_offset = task[1] - chunk_id = task[2] - chunks_total = len(self.asset['upload_urls']) - - is_final_chunk = False - - if chunk_id+1 == chunks_total: - is_final_chunk = True - - session = self._get_session() - - chunk_data = self._smart_read_chunk(chunk_offset, is_final_chunk) - - try: - r = session.put(url, data=chunk_data, headers={ - 'content-type': self.asset['filetype'], - 'x-amz-acl': 'private' - }) - # print("Completed chunk, status: {}".format(r.status_code)) - except Exception as e: - print(e) - - r.raise_for_status() - - def upload(self): - total_size = self.asset['filesize'] - upload_urls = self.asset['upload_urls'] - - chunk_offsets = self._calculate_chunks(total_size, chunk_count=len(upload_urls)) - - with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor: - for i in range(len(upload_urls)): - url = upload_urls[i] - chunk_offset = chunk_offsets[i] - - task = (url, chunk_offset, i) - executor.submit(self._upload_chunk, task) + def __init__(self, asset=None, file=None): + self.asset = asset + self.file = file + self.chunk_size = None + self.file_count = 0 + self.file_num = 0 + self.futures = [] + + def _calculate_chunks(self, total_size: int, chunk_count: int) -> List[int]: + """ + Calculate chunk size + + :param total_size: Total filesize in bytes + :param chunk_count: Total number of URL's we got back from the API + + :return chunk_offsets: List of chunk offsets + """ + self.chunk_size = int(math.ceil(total_size / chunk_count)) + + chunk_offsets = list() + + for index in range(chunk_count): + 
offset_amount = index * self.chunk_size + chunk_offsets.append(offset_amount) + + return chunk_offsets + + def _get_session(self): + if not hasattr(thread_local, "session"): + thread_local.session = requests.Session() + return thread_local.session + + def _smart_read_chunk(self, chunk_offset: int, is_final_chunk: bool) -> bytes: + with open(os.path.realpath(self.file.name), "rb") as file: + file.seek(chunk_offset, 0) + if ( + is_final_chunk + ): # If it's the final chunk, we want to just read until the end of the file + data = file.read() + else: # If it's not the final chunk, we want to ONLY read the specified chunk + data = file.read(self.chunk_size) + return data + + def _upload_chunk(self, task) -> int: + url = task[0] + chunk_offset = task[1] + chunk_id = task[2] + chunks_total = len(self.asset["upload_urls"]) + + is_final_chunk = False + + if chunk_id + 1 == chunks_total: + is_final_chunk = True + + session = self._get_session() + chunk_data = self._smart_read_chunk(chunk_offset, is_final_chunk) + + try: + r = session.put( + url, + data=chunk_data, + headers={ + "content-type": self.asset["filetype"], + "x-amz-acl": "private", + }, + ) + # print("Completed chunk, status: {}".format(r.status_code)) + except Exception as e: + print(e) + + r.raise_for_status() + + return len(chunk_data) + + def upload(self): + total_size = self.asset["filesize"] + upload_urls = self.asset["upload_urls"] + + chunk_offsets = self._calculate_chunks(total_size, chunk_count=len(upload_urls)) + with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor: + for i in range(len(upload_urls)): + url = upload_urls[i] + chunk_offset = chunk_offsets[i] + + task = (url, chunk_offset, i) + self.futures.append(executor.submit(self._upload_chunk, task)) + + # Keep updating the progress while we have > 0 bytes left. 
+ # Wait on threads to finish + for future in concurrent.futures.as_completed(self.futures): + try: + chunk_size = future.result() + except Exception as exc: + print(exc) + + def file_counter(self, folder): + matches = [] + for root, dirnames, filenames in os.walk(folder): + for filename in filenames: + matches.append(os.path.join(filename)) + + self.file_count = len(matches) + + return matches + + def recursive_upload(self, client, folder, parent_asset_id): + # Seperate files and folders: + file_list = list() + folder_list = list() + + if self.file_count == 0: + self.file_counter(folder) + + for item in os.listdir(folder): + if item == ".DS_Store": # Ignore .DS_Store files on Mac + continue + + complete_item_path = os.path.join(folder, item) + + if os.path.isfile(complete_item_path): + file_list.append(item) + else: + folder_list.append(item) + + for file_p in file_list: + self.file_num += 1 + + complete_dir_obj = os.path.join(folder, file_p) + print( + "Starting {:02d}/{}, Size: {}, Name: {}".format( + self.file_num, + self.file_count, + Utils.format_value( + os.path.getsize(complete_dir_obj), type=FormatTypes.SIZE + ), + file_p, + ) + ) + client.assets.upload(parent_asset_id, complete_dir_obj) + + for folder_name in folder_list: + new_folder = os.path.join(folder, folder_name) + new_parent_asset_id = client.assets.create( + parent_asset_id=parent_asset_id, name=folder_name, type="folder" + )["id"] + + self.recursive_upload(client, new_folder, new_parent_asset_id) diff --git a/frameioclient/lib/utils.py b/frameioclient/lib/utils.py index 7f3ef571..3d18b670 100644 --- a/frameioclient/lib/utils.py +++ b/frameioclient/lib/utils.py @@ -1,191 +1,262 @@ +import enum +import os import re import sys +from typing import Any, Dict, Optional + import xxhash KB = 1024 MB = KB * KB +ENV = os.getenv("FRAMEIO_ENVIRONMENT", "prod") + + +def ApiReference(*args, **kwargs): + def inner(func): + """ + do operations with func + """ + if ENV == "build": + print("API Operation: 
{}".format(kwargs.get("operation"))) + + return func + + return inner + + +class FormatTypes(enum.Enum): + SPEED = 0 + SIZE = 1 class Utils: - @staticmethod - def stream(func, page=1, page_size=20): - """ - Accepts a lambda of a call to a client list method, and streams the results until - the list has been exhausted + @staticmethod + def stream(func, page=1, page_size=20): + """ + Accepts a lambda of a call to a client list method, and streams the results until \ + the list has been exhausted. - :Args: - fun (function): A 1-arity function to apply during the stream + Args: + fun (function): A 1-arity function to apply during the stream - Example:: - stream(lambda pagination: client.get_collaborators(project_id, **pagination)) + Example:: + + stream(lambda pagination: client.get_collaborators(project_id, **pagination)) """ - total_pages = page - while page <= total_pages: - result_list = func(page=page, page_size=page_size) - total_pages = result_list.total_pages - for res in result_list: - yield res + total_pages = page + while page <= total_pages: + result_list = func(page=page, page_size=page_size) + total_pages = result_list.total_pages + for res in result_list: + yield res - page += 1 + page += 1 - @staticmethod - def format_bytes(size, type="speed"): - """ - Convert bytes to KB/MB/GB/TB/s - """ - # 2**10 = 1024 - power = 2**10 - n = 0 - power_labels = {0 : 'B', 1: 'KB', 2: 'MB', 3: 'GB', 4: 'TB'} + @staticmethod + def format_value(value: int, type: FormatTypes = FormatTypes.SIZE) -> str: + """ + Convert bytes to KB/MB/GB/TB/s - while size > power: - size /= power - n += 1 + :param value: a numeric value + :param type: the FormatType specified + """ + # 2**10 = 1024 + power = 2 ** 10 + n = 0 + power_labels = {0: "B", 1: "KB", 2: "MB", 3: "GB", 4: "TB"} - formatted = " ".join((str(round(size, 2)), power_labels[n])) + while value > power: + value /= power + n += 1 - if type == "speed": - return formatted + "/s" - - elif type == "size": - return formatted + 
formatted = " ".join((str(round(value, 2)), power_labels[n])) - @staticmethod - def calculate_hash(file_path): - """ - Calculate an xx64hash - """ - xxh64_hash = xxhash.xxh64() - b = bytearray(MB * 8) - f = open(file_path, "rb") - while True: - numread = f.readinto(b) - if not numread: - break - xxh64_hash.update(b[:numread]) - - xxh64_digest = xxh64_hash.hexdigest() + if type == FormatTypes.SPEED: + return formatted + "/s" - return xxh64_digest + elif type == FormatTypes.SIZE: + return formatted - @staticmethod - def compare_items(dict1, dict2): - """ - Python 2 and 3 compatible way of comparing 2x dictionaries - """ - comparison = None + @staticmethod + def calculate_hash(file_path: str, progress_callback: Optional[Any] = None): + """ + Calculate an xx64hash - if sys.version_info.major >= 3: - import operator - comparison = operator.eq(dict1, dict2) - - else: - if dict1 == dict2: - comparison = True + :param file_path: The path on your system to the file you'd like to checksum + :param progress_callback: A progress callback to use when you want to callback w/ progress + """ + xxh64_hash = xxhash.xxh64() + b = bytearray(MB * 8) + f = open(file_path, "rb") + while True: + numread = f.readinto(b) + if not numread: + break - if comparison == False: - print("File mismatch between upload and download") + xxh64_hash.update(b[:numread]) - return comparison + if progress_callback: + # Should only subtract 1 here when necessary, not every time! 
+ progress_callback(float(numread - 1), force=True) - @staticmethod - def get_valid_filename(s): - """ - Strip out invalid characters from a filename using regex - """ - s = str(s).strip().replace(' ', '_') - return re.sub(r'(?u)[^-\w.]', '', s) + xxh64_digest = xxh64_hash.hexdigest() + + return xxh64_digest + + @staticmethod + def compare_items(dict1: Dict, dict2: Dict) -> bool: + """ + Python 2 and 3 compatible way of comparing 2x dictionaries + + :param dict1: Dictionary 1 for comparison + :param dict2: Dictionary 2 for comparison + """ + comparison = None + + if sys.version_info.major >= 3: + import operator + + comparison = operator.eq(dict1, dict2) - @staticmethod - def normalize_filename(fn): - """ - Normalize filename using pure python - """ - validchars = "-_.() " - out = "" - - if isinstance(fn, str): - pass - elif isinstance(fn, unicode): - fn = str(fn.decode('utf-8', 'ignore')) - else: - pass - - for c in fn: - if str.isalpha(c) or str.isdigit(c) or (c in validchars): - out += c else: - out += "_" - return out + if dict1 == dict2: + comparison = True + + if comparison == False: + print("File mismatch between upload and download") + + return comparison + + @staticmethod + def get_valid_filename(s: str) -> str: + """ + Strip out invalid characters from a filename using regex + + :param s: Filename to remove invalid characters from + """ + s = str(s).strip().replace(" ", "_") + return re.sub(r"(?u)[^-\w.]", "", s) + + @staticmethod + def normalize_filename(fn: str) -> str: + """ + Normalize filename using pure python + + :param fn: Filename to normalize using pure python + """ + validchars = "-_.() " + out = "" + + if isinstance(fn, str): + pass + elif isinstance(fn, unicode): + fn = str(fn.decode("utf-8", "ignore")) + else: + pass + + for c in fn: + if str.isalpha(c) or str.isdigit(c) or (c in validchars): + out += c + else: + out += "_" + return out - @staticmethod - def format_headers(token, version): - return { - 'Authorization': 'Bearer 
{}'.format(token), - 'x-frameio-client': 'python/{}'.format(version) - } + @staticmethod + def format_headers(token: str, version: str) -> Dict: + """[summary] + + :param token: Frame.io OAuth/Dev Token to use + :param version: The version of the frameioclient sdk to add to our HTTP header + """ + return { + "Authorization": f"Bearer {token}", + "x-frameio-client": f"python/{version}", + } class PaginatedResponse(object): - def __init__(self, results=[], limit=None, page_size=0, total=0, - total_pages=0, endpoint=None, method=None, payload={}, - client=None): - self.results = results - - self.limit = limit - self.page_size = int(page_size) - self.total = int(total) - self.total_pages = int(total_pages) - - self.endpoint = endpoint - self.method = method - self.payload = payload - self.client = client - - self.asset_index = 0 # Index on current page - self.returned = 0 # Total returned count - self.current_page = 1 - - def __iter__(self): - return self - - def __next__(self): - # Reset if we've reached end - if self.returned == self.limit or self.returned == self.total: - self.asset_index = 0 - self.returned = 0 - self.current_page = 1 - - self.results = self.client.get_specific_page( - self.method, self.endpoint, self.payload, page=1).results - raise StopIteration - - if self.limit is None or self.returned < self.limit: - if self.asset_index < self.page_size and self.returned < self.total: - self.asset_index += 1 - self.returned += 1 - return self.results[self.asset_index - 1] - raise StopIteration + def __init__( + self, + results=[], + limit=None, + page_size=0, + total=0, + total_pages=0, + endpoint=None, + method=None, + payload={}, + client=None, + ): + self.results = results + + self.limit = limit + self.page_size = int(page_size) + self.total = int(total) + self.total_pages = int(total_pages) + + self.endpoint = endpoint + self.method = method + self.payload = payload + self.client = client + + self.asset_index = 0 # Index on current page + self.returned = 0 
# Total returned count + self.current_page = 1 + + def __iter__(self): + return self + + def __next__(self): + # Reset if we've reached end + if self.returned == self.limit or self.returned == self.total: + self.asset_index = 0 + self.returned = 0 + self.current_page = 1 + + self.results = self.client.get_specific_page( + self.method, self.endpoint, self.payload, page=1 + ).results + raise StopIteration + + if self.limit is None or self.returned < self.limit: + if self.asset_index < self.page_size and self.returned < self.total: + self.asset_index += 1 + self.returned += 1 + return self.results[self.asset_index - 1] + raise StopIteration + + if self.current_page < self.total_pages: + self.current_page += 1 + self.asset_index = 1 + self.returned += 1 + + self.results = self.client.get_specific_page( + self.method, self.endpoint, self.payload, self.current_page + ).results + + return self.results[self.asset_index - 1] + raise StopIteration - if self.current_page < self.total_pages: - self.current_page += 1 - self.asset_index = 1 - self.returned += 1 + raise StopIteration - self.results = self.client.get_specific_page( - self.method, self.endpoint, self.payload, self.current_page).results + def next(self): # Python 2 + return self.__next__() - return self.results[self.asset_index - 1] - raise StopIteration + def __len__(self): + if self.limit and self.limit < self.total: + return self.limit - raise StopIteration + return self.total - def next(self): # Python 2 - return self.__next__() - def __len__(self): - if self.limit and self.limit < self.total: - return self.limit +class ProgressBar(object): + def __init__(self, parent=None, total=0, iterable=[]): + self.parent = parent + self.total = total + self.iterable = iterable - return self.total + def create(self): + pass + def update(self): + pass diff --git a/frameioclient/lib/version.py b/frameioclient/lib/version.py index 4f09e4e5..3a8f3ae9 100644 --- a/frameioclient/lib/version.py +++ b/frameioclient/lib/version.py 
@@ -4,7 +4,8 @@ # Running on pre-3.8 Python; use importlib-metadata package import importlib_metadata as metadata + class ClientVersion: @staticmethod def version(): - return metadata.version('frameioclient') + return metadata.version("frameioclient") diff --git a/frameioclient/service/assets.py b/frameioclient/service/assets.py deleted file mode 100644 index b64d56d7..00000000 --- a/frameioclient/service/assets.py +++ /dev/null @@ -1,245 +0,0 @@ -import os -import mimetypes - -from .service import Service -from .projects import Project - -from ..lib import FrameioUploader, FrameioDownloader - -class Asset(Service): - def get(self, asset_id): - """ - Get an asset by id. - - :Args: - asset_id (string): The asset id. - """ - endpoint = '/assets/{}'.format(asset_id) - return self.client._api_call('get', endpoint) - - def get_children(self, asset_id, **kwargs): - """ - Get a folder. - - :Args: - asset_id (string): The asset id. - """ - endpoint = '/assets/{}/children'.format(asset_id) - return self.client._api_call('get', endpoint, kwargs) - - def create(self, parent_asset_id, **kwargs): - """ - Create an asset. - - :Args: - parent_asset_id (string): The parent asset id. - :Kwargs: - (optional) kwargs: additional request parameters. - - Example:: - - client.assets.create( - parent_asset_id="123abc", - name="ExampleFile.mp4", - type="file", - filetype="video/mp4", - filesize=123456 - ) - """ - endpoint = '/assets/{}/children'.format(parent_asset_id) - return self.client._api_call('post', endpoint, payload=kwargs) - - def from_url(self, parent_asset_id, name, url): - """ - Create an asset from a URL. - - :Args: - parent_asset_id (string): The parent asset id. - name (string): The filename. - url (string): The remote URL. 
- - Example:: - - client.assets.from_url( - parent_asset_id="123abc", - name="ExampleFile.mp4", - type="file", - url="https://" - ) - """ - - payload = { - "name": name, - "type": "file", - "source": { - "url": url - } - } - - endpoint = '/assets/{}/children'.format(parent_asset_id) - return self.client._api_call('post', endpoint, payload=payload) - - def update(self, asset_id, **kwargs): - """ - Updates an asset - - :Args: - asset_id (string): the asset's id - :Kwargs: - the fields to update - - Example:: - client.assets.update("adeffee123342", name="updated_filename.mp4") - """ - endpoint = '/assets/{}'.format(asset_id) - return self.client._api_call('put', endpoint, kwargs) - - def copy(self, destination_folder_id, **kwargs): - """ - Copy an asset - - :Args: - destination_folder_id (string): The id of the folder you want to copy into. - :Kwargs: - id (string): The id of the asset you want to copy. - - Example:: - client.assets.copy("adeffee123342", id="7ee008c5-49a2-f8b5-997d-8b64de153c30") - """ - endpoint = '/assets/{}/copy'.format(destination_folder_id) - return self.client._api_call('post', endpoint, kwargs) - - def bulk_copy(self, destination_folder_id, asset_list=[], copy_comments=False): - """Bulk copy assets - - :Args: - destination_folder_id (string): The id of the folder you want to copy into. - :Kwargs: - asset_list (list): A list of the asset IDs you want to copy. - copy_comments (boolean): Whether or not to copy comments: True or False. 
- - Example:: - client.assets.bulk_copy("adeffee123342", asset_list=["7ee008c5-49a2-f8b5-997d-8b64de153c30", \ - "7ee008c5-49a2-f8b5-997d-8b64de153c30"], copy_comments=True) - """ - - payload = {"batch": []} - new_list = list() - - if copy_comments: - payload['copy_comments'] = "all" - - for asset in asset_list: - payload['batch'].append({"id": asset}) - - endpoint = '/batch/assets/{}/copy'.format(destination_folder_id) - return self.client._api_call('post', endpoint, payload) - - def delete(self, asset_id): - """ - Delete an asset - - :Args: - asset_id (string): the asset's id - """ - endpoint = '/assets/{}'.format(asset_id) - return self.client._api_call('delete', endpoint) - - def _upload(self, asset, file): - """ - Upload an asset. The method will exit once the file is uploaded. - - :Args: - asset (object): The asset object. - file (file): The file to upload. - - Example:: - client._upload(asset, open('example.mp4')) - """ - - uploader = FrameioUploader(asset, file) - uploader.upload() - - # def upload_folder(self, destination_id, folderpath): - # try: - # if os.path.isdir(folderpath): - # # Good it's a directory, we can keep going - - # except OSError: - # if not os.path.exists(folderpath): - # sys.exit("Folder doesn't exist, exiting...") - - def build_asset_info(self, filepath): - full_path = os.path.abspath(filepath) - - file_info = { - "filepath": full_path, - "filename": os.path.basename(full_path), - "filesize": os.path.getsize(full_path), - "mimetype": mimetypes.guess_type(full_path)[0] - } - - return file_info - - def upload(self, destination_id, filepath, asset=None): - """ - Upload a file. The method will exit once the file is downloaded. - - :Args: - destination_id (uuid): The destination Project or Folder ID. - filepath (string): The locaiton of the file on your local filesystem \ - that you want to upload. 
- - Example:: - - client.assets.upload('1231-12414-afasfaf-aklsajflaksjfla', "./file.mov") - """ - - # Check if destination is a project or folder - # If it's a project, well then we look up its root asset ID, otherwise we use the folder id provided - # Then we start our upload - - try: - # First try to grab it as a folder - folder_id = self.get(destination_id)['id'] - except Exception as e: - # Then try to grab it as a project - folder_id = Project(self.client).get_project(destination_id)['root_asset_id'] - finally: - file_info = self.build_asset_info(filepath) - - if not asset: - try: - asset = self.create(folder_id, - type="file", - name=file_info['filename'], - filetype=file_info['mimetype'], - filesize=file_info['filesize'] - ) - - except Exception as e: - print(e) - - try: - with open(file_info['filepath'], "rb") as fp: - self._upload(asset, fp) - - except Exception as e: - print(e) - - return asset - - def download(self, asset, download_folder, prefix=None, multi_part=False, concurrency=5): - """ - Download an asset. The method will exit once the file is downloaded. - - :Args: - asset (object): The asset object. - download_folder (path): The location to download the file to. - - Example:: - - client.assets.download(asset, "~./Downloads") - """ - downloader = FrameioDownloader(asset, download_folder, prefix, multi_part, concurrency) - return downloader.download_handler() \ No newline at end of file diff --git a/frameioclient/service/comments.py b/frameioclient/service/comments.py deleted file mode 100644 index d4e6adbc..00000000 --- a/frameioclient/service/comments.py +++ /dev/null @@ -1,89 +0,0 @@ -from .service import Service - -class Comment(Service): - def create(self, asset_id, **kwargs): - """ - Create a comment. - - :Args: - asset_id (string): The asset id. - :Kwargs: - (optional) kwargs: additional request parameters. 
- - Example:: - - client.comments.create( - asset_id="123abc", - text="Hello world" - ) - """ - endpoint = '/assets/{}/comments'.format(asset_id) - return self.client._api_call('post', endpoint, payload=kwargs) - - def get(self, comment_id, **kwargs): - """ - Get a comment. - - :Args: - comment_id (string): The comment id. - """ - endpoint = '/comments/{}'.format(comment_id) - return self.client._api_call('get', endpoint, **kwargs) - - def list(self, asset_id, **kwargs): - """ - Get an asset's comments. - - :Args: - asset_id (string): The asset id. - """ - endpoint = '/assets/{}/comments'.format(asset_id) - return self.client._api_call('get', endpoint, **kwargs) - - def update(self, comment_id, **kwargs): - """ - Update a comment. - - :Args: - comment_id (string): The comment id. - :Kwargs: - (optional) kwargs: additional request parameters. - - Example:: - - client.comments.update( - comment_id="123abc", - text="Hello world" - ) - """ - endpoint = '/comments/{}'.format(comment_id) - return self.client._api_call('post', endpoint, payload=kwargs) - - def delete(self, comment_id): - """ - Delete a comment. - - :Args: - comment_id (string): The comment id. - """ - endpoint = '/comments/{}'.format(comment_id) - return self.client._api_call('delete', endpoint) - - def reply(self, comment_id, **kwargs): - """ - Reply to an existing comment. - - :Args: - comment_id (string): The comment id. - :Kwargs: - (optional) kwargs: additional request parameters. 
- - Example:: - - client.comments.reply( - comment_id="123abc", - text="Hello world" - ) - """ - endpoint = '/comments/{}/replies'.format(comment_id) - return self.client._api_call('post', endpoint, payload=kwargs) diff --git a/frameioclient/service/links.py b/frameioclient/service/links.py deleted file mode 100644 index 0f666e81..00000000 --- a/frameioclient/service/links.py +++ /dev/null @@ -1,121 +0,0 @@ -from .service import Service - -class ReviewLink(Service): - def create(self, project_id, **kwargs): - """ - Create a review link. - - :Args: - project_id (string): The project id. - :Kwargs: - kwargs: additional request parameters. - - Example:: - - client.review_links.create( - project_id="123", - name="My Review Link", - password="abc123" - ) - """ - endpoint = '/projects/{}/review_links'.format(project_id) - return self.client._api_call('post', endpoint, payload=kwargs) - - def list(self, project_id): - """ - Get the review links of a project - - :Args: - asset_id (string): The asset id. - """ - endpoint = '/projects/{}/review_links'.format(project_id) - return self.client._api_call('get', endpoint) - - def get(self, link_id, **kwargs): - """ - Get a single review link - - :Args: - link_id (string): The review link id. - """ - endpoint = '/review_links/{}'.format(link_id) - return self.client._api_call('get', endpoint, payload=kwargs) - - def get_assets(self, link_id): - """ - Get items from a single review link. - - :Args: - link_id (string): The review link id. - - Example:: - - client.review_links.get_assets( - link_id="123" - ) - """ - endpoint = '/review_links/{}/items'.format(link_id) - return self.client._api_call('get', endpoint) - - def update_assets(self, link_id, **kwargs): - """ - Add or update assets for a review link. - - :Args: - link_id (string): The review link id. - :Kwargs: - kwargs: additional request parameters. 
- - Example:: - - client.review_links.update_assets( - link_id="123", - asset_ids=["abc","def"] - ) - """ - endpoint = '/review_links/{}/assets'.format(link_id) - return self.client._api_call('post', endpoint, payload=kwargs) - - def update_settings(self, link_id, **kwargs): - """ - Updates review link settings. - - :Args: - link_id (string): The review link id. - :Kwargs: - kwargs: additional request parameters. - - Example:: - - client.review_links.update_settings( - link_id, - expires_at="2020-04-08T12:00:00+00:00", - is_active=False, - name="Review Link 123", - password="my_fun_password", - ) - """ - endpoint = '/review_links/{}'.format(link_id) - return self.client._api_call('put', endpoint, payload=kwargs) - - -class PresentationLink(Service): - def create(self, asset_id, **kwargs): - """ - Create a presentation link. - - :Args: - asset_id (string): The asset id. - :Kwargs: - kwargs: additional request parameters. - - Example:: - - client.presentation_links.create( - asset_id="9cee7966-4066-b326-7db1-f9e6f5e929e4", - title="My fresh presentation", - password="abc123" - ) - """ - endpoint = '/assets/{}/presentations'.format(asset_id) - return self.client._api_call('post', endpoint, payload=kwargs) diff --git a/frameioclient/service/logs.py b/frameioclient/service/logs.py deleted file mode 100644 index 2ae8bc30..00000000 --- a/frameioclient/service/logs.py +++ /dev/null @@ -1,17 +0,0 @@ -from .service import Service - -class AuditLogs(Service): - def list(self, account_id): - """ - Get audit logs for the currently authenticated account. 
- - :Args: - - Example:: - - client.logs.list( - account_id="6bdcb4d9-9a2e-a765-4548-ae6b27a6c024" - ) - """ - endpoint = '/accounts/{}/audit_logs'.format(account_id) - return self.client._api_call('get', endpoint) diff --git a/frameioclient/service/projects.py b/frameioclient/service/projects.py deleted file mode 100644 index c1e52923..00000000 --- a/frameioclient/service/projects.py +++ /dev/null @@ -1,103 +0,0 @@ -from .service import Service - -class Project(Service): - def create(self, team_id, **kwargs): - """ - Create a project. - - :Args: - team_id (string): The team id. - :Kwargs: - (optional) kwargs: additional request parameters. - - Example:: - client.projects.create( - team_id="123", - name="My Awesome Project" - ) - """ - endpoint = '/teams/{}/projects'.format(team_id) - return self.client._api_call('post', endpoint, payload=kwargs) - - def get(self, project_id): - """ - Get an individual project - - :Args: - project_id (string): The project's id - - Example:: - client.project.get( - project_id="123" - ) - - """ - endpoint = '/projects/{}'.format(project_id) - return self.client._api_call('get', endpoint) - - def get_collaborators(self, project_id, **kwargs): - """ - Get collaborators for a project - - :Args: - project_id (uuid): The project's id - - Example:: - client.projects.get_collaborators( - project_id="123" - ) - - """ - endpoint = "/projects/{}/collaborators?include=project_role".format(project_id) - return self.client._api_call('get', endpoint, kwargs) - - def get_pending_collaborators(self, project_id, **kwargs): - """ - Get pending collaborators for a project - - :Args: - project_id (uuid): The project's id - - Example:: - client.projects.get_pending_collaborators( - project_id="123" - ) - - """ - endpoint = "/projects/{}/pending_collaborators".format(project_id) - return self.client._api_call('get', endpoint, kwargs) - - def add_collaborator(self, project_id, email): - """ - Add Collaborator to a Project Collaborator. 
- - :Args: - project_id (uuid): The project id - email (string): Email user's e-mail address - - Example:: - client.projects.add_collaborator( - project_id="123", - email="janedoe@frame.io", - ) - """ - payload = {"email": email} - endpoint = '/projects/{}/collaborators'.format(project_id) - return self._api_call('post', endpoint, payload=payload) - - def remove_collaborator(self, project_id, email): - """ - Remove Collaborator from Project. - - :Args: - project_id (uuid): The Project ID. - email (string): The user's e-mail address - - Example:: - client.projects.remove_collaborator( - project_id="123", - email="janedoe@frame.io" - ) - """ - endpoint = '/projects/{}/collaborators/_?email={}'.format(project_id, email) - return self._api_call('delete', endpoint) diff --git a/frameioclient/service/service.py b/frameioclient/service/service.py deleted file mode 100644 index 159eb61d..00000000 --- a/frameioclient/service/service.py +++ /dev/null @@ -1,3 +0,0 @@ -class Service(object): - def __init__(self, client): - self.client = client diff --git a/frameioclient/service/teams.py b/frameioclient/service/teams.py deleted file mode 100644 index 75c01137..00000000 --- a/frameioclient/service/teams.py +++ /dev/null @@ -1,103 +0,0 @@ -import warnings -from .service import Service - -class Team(Service): - def create(self, account_id, **kwargs): - """ - Create a Team - - :Args: - account_id (string): The account id you want to create this Team under. - :Kwargs: - (optional) kwargs: additional request parameters. - - Example:: - - client.teams.create( - account_id="6bdcb4d9-4548-4548-4548-27a6c024ae6b", - name="My Awesome Project", - ) - """ - warnings.warn('Note: Your token must support team.create scopes') - endpoint = '/accounts/{}/teams'.format(account_id) - return self.client._api_call('post', endpoint, payload=kwargs) - - def list(self, account_id, **kwargs): - """ - Get teams owned by the specified account. 
- (To return all teams, use list_all()) - - :Args: - account_id (string): The account id. - """ - endpoint = '/accounts/{}/teams'.format(account_id) - return self.client._api_call('get', endpoint, kwargs) - - def list_all(self, **kwargs): - """ - Get all teams for the authenticated user. - - :Args: - account_id (string): The account id. - """ - endpoint = '/teams' - return self.client._api_call('get', endpoint, kwargs) - - def get(self, team_id): - """ - Get team by id - - :Args: - team_id (string): the Team's id - """ - endpoint = '/teams/{}'.format(team_id) - return self.client._api_call('get', endpoint) - - def get_members(self, team_id): - """ - Get the member list for a given Team. - - :Args: - team_id (string): The Team id. - """ - endpoint = '/teams/{}/members'.format(team_id) - return self.client._api_call('get', endpoint) - - def list_projects(self, team_id, **kwargs): - """ - Get projects owned by the Team. - - :Args: - team_id (string): The Team id. - """ - endpoint = '/teams/{}/projects'.format(team_id) - return self.client._api_call('get', endpoint, kwargs) - - def add_members(self, team_id, emails): - """ - Add a list of users via their e-mail address to a given Team. - - :Args: - team_id (string): The team id. - emails (list): The e-mails you want to add. - """ - payload = dict() - payload['batch'] = list(map(lambda email: {"email": email}, emails)) - - endpoint = '/batch/teams/{}/members'.format(team_id) - return self._api_call('post', endpoint, payload=payload) - - def remove_members(self, team_id, emails): - """ - Remove a list of users via their e-mail address from a given Team. - - :Args: - team_id (string): The team id. - emails (list): The e-mails you want to add. 
- """ - - payload = dict() - payload['batch'] = list(map(lambda email: {"email": email}, emails)) - - endpoint = '/batch/teams/{}/members'.format(team_id) - return self._api_call('delete', endpoint, payload=payload) diff --git a/frameioclient/service/users.py b/frameioclient/service/users.py deleted file mode 100644 index 36daa322..00000000 --- a/frameioclient/service/users.py +++ /dev/null @@ -1,8 +0,0 @@ -from .service import Service - -class User(Service): - def get_me(self): - """ - Get the current user. - """ - return self.client._api_call('get', '/me') \ No newline at end of file diff --git a/frameioclient/service/__init__.py b/frameioclient/services/__init__.py similarity index 59% rename from frameioclient/service/__init__.py rename to frameioclient/services/__init__.py index 0cd634d5..0efb1b65 100644 --- a/frameioclient/service/__init__.py +++ b/frameioclient/services/__init__.py @@ -4,4 +4,6 @@ from .logs import AuditLogs from .comments import Comment from .projects import Project -from .links import ReviewLink, PresentationLink \ No newline at end of file +from .search import Search +from .links import ReviewLink, PresentationLink +from .helpers import FrameioHelpers diff --git a/frameioclient/services/assets.py b/frameioclient/services/assets.py new file mode 100644 index 00000000..3ce45c88 --- /dev/null +++ b/frameioclient/services/assets.py @@ -0,0 +1,417 @@ +import mimetypes +import os +from typing import Dict, List, Optional, Union +from uuid import UUID + +from frameioclient.lib.transfer import AWSClient + +from ..lib import ApiReference, FrameioDownloader, FrameioUploader, constants +from ..lib.service import Service +from .projects import Project + + +class Asset(Service): + def _build_asset_info(self, filepath: str) -> Dict: + full_path = os.path.abspath(filepath) + + file_info = { + "filepath": full_path, + "filename": os.path.basename(full_path), + "filesize": os.path.getsize(full_path), + "mimetype": mimetypes.guess_type(full_path)[0], + } + 
+ return file_info + + @ApiReference(operation="#getAsset") + def get(self, asset_id: Union[str, UUID]): + """ + Get an asset by id. + + :param asset_id: The asset id. + + Example:: + + client.assets.get( + asset_id='1231-12414-afasfaf-aklsajflaksjfla', + ) + + """ + endpoint = "/assets/{}".format(asset_id) + return self.client._api_call("get", endpoint) + + @ApiReference(operation="#getAssets") + def get_children( + self, + asset_id: Union[str, UUID], + includes: Optional[List] = [], + slim: Optional[bool] = False, + **kwargs, + ): + """ + Get a folder. + + :param asset_id: The asset id. + + :Keyword Arguments: + includes (list): List of includes you would like to add. + + Example:: + + client.assets.get_children( + asset_id='1231-12414-afasfaf-aklsajflaksjfla', + include=['review_links','cover_asset','creator','presentation'] + ) + """ + endpoint = "/assets/{}/children".format(asset_id) + + if slim == True: + query_params = "" + + if len(includes) > 0: + query_params += "?include={}".format(includes.join(",")) + else: + # Always include children + query_params += "?" 
+ "include=children" + + # Only fields + query_params += ( + "&" + "only_fields=" + ",".join(constants.asset_excludes["only_fields"]) + ) + + # # Drop includes + query_params += ( + "&" + + "drop_includes=" + + ",".join(constants.asset_excludes["drop_includes"]) + ) + + # # Hard drop fields + query_params += ( + "&" + + "hard_drop_fields=" + + ",".join(constants.asset_excludes["hard_drop_fields"]) + ) + + # Excluded fields + # query_params += '&' + 'excluded_fields=' + ','.join(constants.asset_excludes['excluded_fields']) + + # # Sort by inserted_at + # query_params += '&' + 'sort=-inserted_at' + + endpoint += query_params + + # print("Final URL", endpoint) + + return self.client._api_call("get", endpoint, kwargs) + + @ApiReference(operation="#createAsset") + def create( + self, + parent_asset_id: Union[str, UUID], + name: str, + type: Optional[str] = "file", + filetype: Optional[str] = None, + filesize: Optional[int] = None, + ): + """ + Create an asset. + + :param parent_asset_id: The parent asset id + :param name: The asset's display name + :param type: The type of asset ('file', 'folder') + :param filesize: The size of the asset in bytes + :param filetype: The MIME-type of the asset + + Example:: + + client.assets.create( + parent_asset_id="123abc", + name="ExampleFile.mp4", + type="file", + filetype="video/mp4", + filesize=123456 + ) + """ + kwargs = { + "name": name, + "type": type, + "filesize": filesize, + "filetype": filetype, + "properties": {"reference_id": "7eaa2f13-1202-42b3-a360-9d21e9a9efa7"}, + } + + endpoint = "/assets/{}/children".format(parent_asset_id) + return self.client._api_call("post", endpoint, payload=kwargs) + + @ApiReference(operation="#createAsset") + def create_folder(self, parent_asset_id: str, name: str = "New Folder"): + """ + Create a new folder. + + :param parent_asset_id: The parent asset id. + :param name: The name of the new folder. 
+ + Example:: + + client.assets.create_folder( + parent_asset_id="123abc", + name="ExampleFile.mp4", + ) + """ + endpoint = "/assets/{}/children".format(parent_asset_id) + return self.client._api_call( + "post", endpoint, payload={"name": name, "type": "folder"} + ) + + @ApiReference(operation="#createAsset") + def from_url(self, parent_asset_id: Union[str, UUID], name: str, url: str): + """ + Create an asset from a URL. + + :param parent_asset_id: The parent asset id. + :param name: The filename. + :param url: The remote URL. + + Example:: + + client.assets.from_url( + parent_asset_id="123abc", + name="ExampleFile.mp4", + type="file", + url="https://" + ) + """ + payload = {"name": name, "type": "file", "source": {"url": url}} + + endpoint = "/assets/{}/children".format(parent_asset_id) + return self.client._api_call("post", endpoint, payload=payload) + + @ApiReference(operation="#updateAsset") + def update(self, asset_id: Union[str, UUID], **kwargs): + """ + Updates an asset + + :param asset_id: The asset's id + + :Keyword Arguments: + the fields to update + + Example:: + + client.assets.update("adeffee123342", name="updated_filename.mp4") + """ + endpoint = "/assets/{}".format(asset_id) + return self.client._api_call("put", endpoint, kwargs) + + @ApiReference(operation="#copyAsset") + def copy( + self, + destination_folder_id: Union[str, UUID], + target_asset_id: Union[str, UUID], + ): + """ + Copy an asset + + :param destination_folder_id: The id of the folder you want to copy into. + :param target_asset_id: The id of the asset you want to copy. 
+ + Example:: + + client.assets.copy("adeffee123342", id="7ee008c5-49a2-f8b5-997d-8b64de153c30") + """ + kwargs = {"id": target_asset_id} + endpoint = "/assets/{}/copy".format(destination_folder_id) + return self.client._api_call("post", endpoint, kwargs) + + @ApiReference(operation="#batchCopyAsset") + def bulk_copy( + self, + destination_folder_id: Union[str, UUID], + asset_list: Optional[List] = [], + copy_comments: Optional[bool] = False, + ): + """ + Bulk copy assets + + :param destination_folder_id: The id of the folder you want to copy into. + :param asset_list: A list of the asset IDs you want to copy. + :param copy_comments: Whether or not to copy comments: True or False. + + Example:: + + client.assets.bulk_copy( + "adeffee123342", + asset_list=[ + "7ee008c5-49a2-f8b5-997d-8b64de153c30", + "7ee008c5-49a2-f8b5-997d-8b64de153c30" + ], + copy_comments=True + ) + """ + payload = {"batch": []} + + if copy_comments: + payload["copy_comments"] = "all" + + for asset in asset_list: + payload["batch"].append({"id": asset}) + + endpoint = "/batch/assets/{}/copy".format(destination_folder_id) + return self.client._api_call("post", endpoint, payload) + + def add_version( + self, target_asset_id: Union[str, UUID], new_version_id: Union[str, UUID] + ): + """ + Add a new version to a version stack, or create a new one! + + :param target_asset_id: The main/destination Asset or Version Stack. + :param new_version_id: The id for the asset you want to add to the Version Stack or create a new one with. 
+ + Example:: + + client.add_version_to_asset( + destination_id="123", + next_asset_id="234" + ) + """ + + payload = {"next_asset_id": new_version_id} + + endpoint = f"/assets/{target_asset_id}/version" + + return self.client._api_call("post", endpoint, payload=payload) + + @ApiReference(operation="#deleteAsset") + def delete(self, asset_id: Union[str, UUID]): + """ + Delete an asset + + :param asset_id: the asset's id + """ + endpoint = "/assets/{}".format(asset_id) + return self.client._api_call("delete", endpoint) + + def _upload(self, asset: Dict, file: object): + """ + Upload an asset. The method will exit once the file is uploaded. + + :param asset: The asset object as returned via the frame.io API. + :param file: The file to upload. + + Example:: + + client.upload(asset, open('example.mp4')) + """ + uploader = FrameioUploader(asset, file) + uploader.upload() + + def upload( + self, + destination_id: Union[str, UUID], + filepath: str, + asset: Optional[Dict] = None, + ): + """ + Upload a file. The method will exit once the file is uploaded. + + :param destination_id: The destination Project or Folder ID. + :param filepath: The location of the file on your local filesystem that you want to upload. 
+ + Example:: + + client.assets.upload('1231-12414-afasfaf-aklsajflaksjfla', "./file.mov") + """ + + # Check if destination is a project or folder + # If it's a project, well then we look up its root asset ID, otherwise we use the folder id provided + # Then we start our upload + + try: + # First try to grab it as a folder + folder_id = self.get(destination_id)["id"] + except Exception as e: + # Then try to grab it as a project + folder_id = Project(self.client).get(destination_id)["root_asset_id"] + finally: + file_info = self._build_asset_info(filepath) + + if not asset: + try: + asset = self.create( + folder_id, + type="file", + name=file_info["filename"], + filetype=file_info["mimetype"], + filesize=file_info["filesize"], + ) + + except Exception as e: + print(e) + + try: + with open(file_info["filepath"], "rb") as fp: + self._upload(asset, fp) + + except Exception as e: + print(e) + + else: + with open(file_info["filepath"], "rb") as fp: + self._upload(asset, fp) + + return asset + + def download( + self, + asset: Dict, + download_folder: str, + prefix: Optional[str] = None, + multi_part: Optional[bool] = None, + replace: Optional[bool] = False, + ): + """ + Download an asset. The method will exit once the file is downloaded. + + :param asset: The asset object. + :param download_folder: The location to download the file to. + :param multi_part: Attempt to do a multi-part download (non-WMID assets). + :param replace: Whether or not you want to replace a file if one is found at the destination path. + + Example:: + + client.assets.download(asset, "~./Downloads") + """ + downloader = FrameioDownloader( + asset, download_folder, prefix, multi_part, replace + ) + return AWSClient(downloader, concurrency=5).multi_thread_download() + + def upload_folder(self, source_path: str, destination_id: Union[str, UUID]): + """ + Upload a folder full of assets, maintaining hierarchy. \ + The method will exit once the file is uploaded. 
+ + :param filepath: The location of the folder on your disk. + :param destination_id: The destination Project or Folder ID. + + Example:: + + client.assets.upload("./file.mov", "1231-12414-afasfaf-aklsajflaksjfla") + """ + + # Check if destination is a project or folder + # If it's a project, well then we look up its root asset ID, otherwise we use the folder id provided + # Then we start our upload + + try: + # First try to grab it as a folder + folder_id = self.get(destination_id)["id"] + except Exception as e: + # Then try to grab it as a project + folder_id = Project(self.client).get(destination_id)["root_asset_id"] + finally: + return FrameioUploader().recursive_upload( + self.client, source_path, folder_id + ) diff --git a/frameioclient/services/comments.py b/frameioclient/services/comments.py new file mode 100644 index 00000000..5d797183 --- /dev/null +++ b/frameioclient/services/comments.py @@ -0,0 +1,125 @@ +from typing import Optional, Union +from uuid import UUID + +from ..lib.service import Service +from ..lib.utils import ApiReference + + +class Comment(Service): + @ApiReference(operation="#createComment") + def create( + self, + asset_id: Union[str, UUID], + text: Optional[str] = None, + timestamp: Optional[int] = None, + annotation: Optional[str] = None, + **kwargs + ): + """ + Create a comment. + + :param asset_id: The asset id. + :param text: The comment text. + :param timestamp: The timestamp of the comment. + :param annotation: The serialized contents of the annotation. + + :Keyword Arguments: + (optional) kwargs: additional request parameters. 
+ + Example:: + + client.comments.create( + asset_id="123abc", + text="Hello world", + timestamp=10 + ) + """ + kwargs = {"text": text, "annotation": annotation, "timestamp": timestamp} + + endpoint = "/assets/{}/comments".format(asset_id) + return self.client._api_call("post", endpoint, payload=kwargs) + + @ApiReference(operation="#getComment") + def get(self, comment_id: Union[str, UUID], **kwargs): + """ + Get a comment. + + :param comment_id: The comment id. + """ + endpoint = "/comments/{}".format(comment_id) + return self.client._api_call("get", endpoint, **kwargs) + + @ApiReference(operation="#getComments") + def list(self, asset_id: Union[str, UUID], **kwargs): + """ + Get an asset's comments. + + :param asset_id: The asset id. + """ + endpoint = "/assets/{}/comments".format(asset_id) + return self.client._api_call("get", endpoint, **kwargs) + + @ApiReference(operation="#updateComment") + def update( + self, + comment_id: Union[str, UUID], + text: Optional[str] = None, + timestamp: Optional[int] = None, + annotation: Optional[str] = None, + **kwargs + ): + """ + Update a comment. + + :param comment_id: The comment id. + :param text: The comment text. + :param timestamp: The timestamp of the comment. + :param annotation: The serialized contents of the annotation. + + :Keyword Arguments: + (optional) kwargs: additional request parameters. + + Example:: + + client.comments.update( + comment_id="123abc", + text="Hello world", + timestamp=10 + ) + """ + + kwargs = {"text": text, "annotation": annotation, "timestamp": timestamp} + + endpoint = "/comments/{}".format(comment_id) + return self.client._api_call("post", endpoint, payload=kwargs) + + @ApiReference(operation="#deleteComment") + def delete(self, comment_id: Union[str, UUID]): + """ + Delete a comment. + + :param comment_id: The comment id. 
+ """ + endpoint = "/comments/{}".format(comment_id) + return self.client._api_call("delete", endpoint) + + @ApiReference(operation="#createReply") + def reply(self, comment_id, **kwargs): + """ + Reply to an existing comment. + + Args: + comment_id (string): The comment id. + + :Keyword Arguments: + (optional) kwargs: additional request parameters. + + Example:: + + client.comments.reply( + comment_id="123abc", + text="Hello world" + ) + """ + endpoint = "/comments/{}/replies".format(comment_id) + return self.client._api_call("post", endpoint, payload=kwargs) diff --git a/frameioclient/services/helpers.py b/frameioclient/services/helpers.py new file mode 100644 index 00000000..75955403 --- /dev/null +++ b/frameioclient/services/helpers.py @@ -0,0 +1,163 @@ +import os + +from pathlib import Path +from time import time, sleep + +from ..lib.service import Service +from ..lib.utils import Utils + +from copy import deepcopy +from typing import List +from pprint import pprint + + +class FrameioHelpers(Service): + def get_updated_assets(self, account_id, project_id, timestamp): + """ + Get assets added or updated since timestamp. + + :Args: + account_id (string): The account id. + project_id (string): The project id. + timestamp (string): ISO 8601 UTC format. 
+ (datetime.now(timezone.utc).isoformat()) + """ + payload = { + "account_id": account_id, + "page": 1, + "page_size": 50, + "include": "children", + "sort": "-inserted_at", + "filter": { + "project_id": {"op": "eq", "value": project_id}, + "updated_at": {"op": "gte", "value": timestamp}, + }, + } + endpoint = "/search/library" + return self.client._api_call("post", endpoint, payload=payload) + + def get_assets_recursively(self, asset_id, slim=True): + assets = self.client.assets.get_children(asset_id, slim=slim) + print("Number of assets at top level", len(assets)) + + for index, asset in enumerate(assets): + # try: + print( + f"Type: {asset['_type']}, Name: {asset['name']}, Children: {len(asset['children'])}" + ) + # except KeyError: + # print("No children found") + + total_bytes = 0 + + if asset["_type"] == "file": + # Don't do nothing, it's a file! + continue + + if asset["_type"] == "version_stack": + print("Grabbing top item from version stack") + versions = self.client.assets.get_children(asset["id"], slim=True) + assets[index]['children'] = versions # re-assign on purpose + continue + + # We only get the first three items when we use "include=children" + if asset["_type"] == "folder": + # try: + if asset["item_count"] > 3: + # Recursively fetch the contents of the folder because we have to + asset["children"] = self.get_assets_recursively(asset["id"], slim) + print("Grabbed more items for this sub dir") + + else: + for i in asset["children"]: + # If a folder is found, we still need to recursively search it + if i["_type"] == "folder": + i["children"] = self.get_assets_recursively(i["id"], slim) + + # except KeyError as e: + # # No children found in this folder, move on + # print(e) + # continue + + return assets + + def build_project_tree(self, project_id, slim=True): + # if slim == True: + # self.client.assets.get_children() + + # Get project info + project = self.client.projects.get(project_id) + + # Get children + initial_tree = 
self.get_assets_recursively(project["root_asset_id"], slim) + + return initial_tree + + def download_project(self, project_id, destination): + project = self.client.projects.get(project_id) + initial_tree = self.get_assets_recursively(project["root_asset_id"]) + self.recursive_downloader(destination, initial_tree) + + def recursive_downloader(self, directory, asset, manifest=[]): + print(f"Directory {directory}") + + try: + # First check to see if we need to make the directory + target_directory = os.path.join(os.path.curdir, directory) + if not os.path.isdir(target_directory): + os.mkdir(os.path.abspath(target_directory)) + + except Exception as e: + target_directory = os.path.abspath(os.path.join(os.path.curdir, directory)) + print(e) + + if type(asset) == list: + for i in asset: + self.recursive_downloader(directory, i) + + else: + try: + if asset["_type"] == "folder": + if len(asset["children"]) >= 0: + # count += 1 + # Create the new folder that these items will go in before it's too late + if not os.path.exists( + os.path.join(target_directory, asset["name"]) + ): + print("Path doesn't exist") + new_path = Path( + target_directory, str(asset["name"]).replace("/", "-") + ) + print(new_path.absolute) + print("Making new directory") + Path.mkdir(new_path) + sleep(2) + + # Pass along the new directory they'll be living in and the children + self.recursive_downloader( + f"{directory}/{str(asset['name']).replace('/', '-')}", + asset["children"], + manifest + ) + + if asset["_type"] == "file": + # count += 1 + fn = self.client.assets.download( + asset, target_directory, multi_part=True + ) + manifest.append({ + "asset_id": asset['id'], + "file_path": fn, + "directory": target_directory + }) + + + except Exception as e: + print(e) + + pprint(manifest) + return True + + +if __name__ == "__main__": + pass diff --git a/frameioclient/services/links.py b/frameioclient/services/links.py new file mode 100644 index 00000000..731f02c9 --- /dev/null +++ 
b/frameioclient/services/links.py @@ -0,0 +1,134 @@ +from ..lib.utils import ApiReference +from ..lib.service import Service + + +class ReviewLink(Service): + @ApiReference(operation="#reviewLinkCreate") + def create(self, project_id, **kwargs): + """ + Create a review link. + + Args: + project_id (string): The project id. + + :Keyword Arguments: + kwargs: additional request parameters. + + Example:: + + client.review_links.create( + project_id="123", + name="My Review Link", + password="abc123" + ) + """ + endpoint = "/projects/{}/review_links".format(project_id) + return self.client._api_call("post", endpoint, payload=kwargs) + + @ApiReference(operation="#reviewLinksList") + def list(self, project_id): + """ + Get the review links of a project + + Args: + asset_id (string): The asset id. + """ + endpoint = "/projects/{}/review_links".format(project_id) + return self.client._api_call("get", endpoint) + + @ApiReference(operation="#reviewLinkGet") + def get(self, link_id, **kwargs): + """ + Get a single review link + + Args: + link_id (string): The review link id. + """ + endpoint = "/review_links/{}".format(link_id) + return self.client._api_call("get", endpoint, payload=kwargs) + + @ApiReference(operation="#reviewLinkItemsList") + def get_assets(self, link_id): + """ + Get items from a single review link. + + Args: + link_id (string): The review link id. + + Example:: + + client.review_links.get_assets( + link_id="123" + ) + """ + endpoint = "/review_links/{}/items".format(link_id) + return self.client._api_call("get", endpoint) + + @ApiReference(operation="#reviewLinkItemsUpdate") + def update_assets(self, link_id, **kwargs): + """ + Add or update assets for a review link. + + Args: + link_id (string): The review link id. + + :Keyword Arguments: + kwargs: additional request parameters. 
+ + Example:: + + client.review_links.update_assets( + link_id="123", + asset_ids=["abc","def"] + ) + """ + endpoint = "/review_links/{}/assets".format(link_id) + return self.client._api_call("post", endpoint, payload=kwargs) + + @ApiReference(operation="#reviewLinkUpdate") + def update_settings(self, link_id, **kwargs): + """ + Updates review link settings. + + Args: + link_id (string): The review link id. + + :Keyword Arguments: + kwargs: additional request parameters. + + Example:: + + client.review_links.update_settings( + link_id, + expires_at="2020-04-08T12:00:00+00:00", + is_active=False, + name="Review Link 123", + password="my_fun_password", + ) + """ + endpoint = "/review_links/{}".format(link_id) + return self.client._api_call("put", endpoint, payload=kwargs) + + +class PresentationLink(Service): + @ApiReference(operation="#createPresentation") + def create(self, asset_id, **kwargs): + """ + Create a presentation link. + + Args: + asset_id (string): The asset id. + + :Keyword Arguments: + kwargs: additional request parameters. + + Example:: + + client.presentation_links.create( + asset_id="9cee7966-4066-b326-7db1-f9e6f5e929e4", + title="My fresh presentation", + password="abc123" + ) + """ + endpoint = "/assets/{}/presentations".format(asset_id) + return self.client._api_call("post", endpoint, payload=kwargs) diff --git a/frameioclient/services/logs.py b/frameioclient/services/logs.py new file mode 100644 index 00000000..b159448f --- /dev/null +++ b/frameioclient/services/logs.py @@ -0,0 +1,24 @@ +from typing import Union +from uuid import UUID + +from ..lib.service import Service + + +class AuditLogs(Service): + def list(self, account_id: Union[str, UUID]): + """ + Get audit logs for the currently authenticated account. + + :param account_id: Account ID you want to get audit logs for. + + Example:: + + client.logs.list( + account_id="6bdcb4d9-9a2e-a765-4548-ae6b27a6c024" + ) + + Returns: + list: List of audit logs. 
+ """ + endpoint = "/accounts/{}/audit_logs".format(account_id) + return self.client._api_call("get", endpoint) diff --git a/frameioclient/services/projects.py b/frameioclient/services/projects.py new file mode 100644 index 00000000..a5829c27 --- /dev/null +++ b/frameioclient/services/projects.py @@ -0,0 +1,159 @@ +from typing import Union, Optional +from uuid import UUID + +from ..lib.service import Service +from .helpers import FrameioHelpers + + +class Project(Service): + def create(self, team_id: Union[str, UUID], **kwargs): + """ + Create a project. + + :param team_id: The team id. + + :Kwargs: + kwargs (optional): additional request parameters. + + Example:: + + client.projects.create( + team_id="123", + name="My Awesome Project" + ) + """ + + endpoint = "/teams/{}/projects".format(team_id) + return self.client._api_call("post", endpoint, payload=kwargs) + + def get(self, project_id: Union[str, UUID]): + """ + Get an individual project + + :param project_id: The project's id + + Example:: + + client.project.get( + project_id="123" + ) + """ + + endpoint = "/projects/{}".format(project_id) + return self.client._api_call("get", endpoint) + + def tree(self, project_id: Union[str, UUID], slim: Optional[bool] = False): + """ + Fetch a tree representation of all files/folders in a project. 
+ + :param project_id: The project's id + :param slim: If true, fetch only the minimum information for the following: \ + filename, \ + filesize, \ + thumbnail, \ + creator_id, \ + inserted_at (date created), \ + path (represented like a filesystem) + + Example:: + + client.projects.get( + project_id="123", + slim=True + ) + """ + + # endpoint = "/projects/{}/tree?depth=20&drop_includes=a.transcode_statuses,a.transcodes,a.source,a.checksums&only_fields=a.name,a.filesize,u.name,a.item_count,a.creator_id,a.inserted_at,a.uploaded_at".format(project_id) + # return self.client._api_call('get', endpoint) + + return FrameioHelpers(self.client).build_project_tree(project_id, slim) + + def download(self, project_id: Union[str, UUID], destination_directory="downloads"): + """ + Download the provided project to disk. + + :param project_id: The project's id. + :param destination_directory: Directory on disk that you want to download the project to. + + Example:: + + client.projects.download( + project_id="123", + destination_directory="./downloads" + ) + """ + + return FrameioHelpers(self.client).download_project( + project_id, destination=destination_directory + ) + + def get_collaborators(self, project_id: Union[str, UUID], **kwargs): + """ + Get collaborators for a project + + :param project_id: The project's id + + Example:: + + client.projects.get_collaborators( + project_id="123" + ) + """ + + endpoint = "/projects/{}/collaborators?include=project_role".format(project_id) + return self.client._api_call("get", endpoint, kwargs) + + def get_pending_collaborators(self, project_id: Union[str, UUID], **kwargs): + """ + Get pending collaborators for a project + + :param project_id: The project's id + + Example:: + + client.projects.get_pending_collaborators( + project_id="123" + ) + """ + + endpoint = "/projects/{}/pending_collaborators".format(project_id) + return self.client._api_call("get", endpoint, kwargs) + + def add_collaborator(self, project_id: Union[str, UUID], 
email: str): + """ + Add Collaborator to a Project Collaborator. + + :param project_id: The project id + :param email: Email user's e-mail address + + Example:: + + client.projects.add_collaborator( + project_id="123", + email="janedoe@frame.io", + ) + """ + + payload = {"email": email} + endpoint = "/projects/{}/collaborators".format(project_id) + return self.client._api_call("post", endpoint, payload=payload) + + def remove_collaborator(self, project_id: Union[str, UUID], email: str): + """ + Remove Collaborator from Project. + + :param project_id: The Project ID. + :param email: The user's e-mail address + + Example:: + + client.projects.remove_collaborator( + project_id="123", + email="janedoe@frame.io" + ) + """ + + # TODO update this function to not use query parameter based email input + + endpoint = "/projects/{}/collaborators/_?email={}".format(project_id, email) + return self.client._api_call("delete", endpoint) diff --git a/frameioclient/services/search.py b/frameioclient/services/search.py new file mode 100644 index 00000000..31067f09 --- /dev/null +++ b/frameioclient/services/search.py @@ -0,0 +1,75 @@ +from typing import Optional, Union +from uuid import UUID + +from ..lib.service import Service + + +class Search(Service): + def library( + self, + query: str, + type: Optional[str] = None, + project_id: Union[str, UUID] = None, + account_id: Union[str, UUID] = None, + team_id: Union[str, UUID] = None, + uploader: Optional[str] = None, + sort: Optional[str] = None, + filter: Optional[str] = None, + page_size: Optional[int] = 10, + page: Optional[int] = 1, + ): + """ + Search for assets using the library search endpoint, documented at https://developer.frame.io/docs/workflows-assets/search-for-assets. + For more information check out https://developer.frame.io/api/reference/operation/librarySearchPost/. + + # TODO, confirm that account_id is required or not, could we use self.me? + + :param query: The search keyword you want to search with. 
+ :param account_id: The frame.io account want you to contrain your search to (you may only have one, but some users have 20+ that they have acces to). + :param type: The type of frame.io asset you want to search: [file, folder, review_link, presentation]. + :param project_id: The frame.io project you want to constrain your search to. + :param team_id: The frame.io team you want to constrain your search to. + :param uploader: The name of the uploader, this includes first + last name with a space. + :param sort: The field you want to sort by. + :param filter: This is only necessary if you want to build a fully custom query, the most common functionality is exposed using other kwargs though. + :param page_size: Useful if you want to increase the number of items returned by the search API here. + :param page: The page of results you're requesting. + + Example:: + + client.search.library( + query="Final", + type="file", + sort="name" + ) + """ + + # Define base payload + payload = { + "account_id": account_id, + "q": query, + "sort": sort, + "page_size": page_size, + "page": page, + } + + # Add fully custom filter + if filter is not None: + payload["filter"] = filter + + # Add simple filters + if project_id is not None: + payload["filter"]["project_id"] = {"op": "eq", "value": project_id} + if team_id is not None: + payload["filter"]["team_id"] = {"op": "eq", "value": team_id} + if type is not None: + payload["filter"]["type"] = {"op": "eq", "value": type} + if uploader is not None: + payload["filter"]["creator.name"] = {"op": "match", "value": uploader} + + # Add sorting + if sort is not None: + payload["sort"] = sort + + endpoint = "/search/library" + return self.client._api_call("post", endpoint, payload=payload) diff --git a/frameioclient/services/teams.py b/frameioclient/services/teams.py new file mode 100644 index 00000000..6c3ee306 --- /dev/null +++ b/frameioclient/services/teams.py @@ -0,0 +1,105 @@ +import warnings +from ..lib.service import Service + + +class 
Team(Service): + def create(self, account_id, **kwargs): + """ + Create a Team + + Args: + account_id (string): The account id you want to create this team under. + + :Keyword Arguments:: + (optional) kwargs: additional request parameters. + + Example:: + + client.teams.create( + account_id="6bdcb4d9-4548-4548-4548-27a6c024ae6b", + name="My Awesome Project", + ) + """ + warnings.warn("Note: Your token must support the team.create scope") + endpoint = "/accounts/{}/teams".format(account_id) + return self.client._api_call("post", endpoint, payload=kwargs) + + def list(self, account_id, **kwargs): + """ + Get teams owned by the specified account. \ + (To return all teams, use list_all()) + + Args: + account_id (string): The account id. + """ + endpoint = "/accounts/{}/teams".format(account_id) + return self.client._api_call("get", endpoint, kwargs) + + def list_all(self, **kwargs): + """ + Get all teams for the authenticated user. + + Args: + account_id (string): The account id. + """ + endpoint = "/teams" + return self.client._api_call("get", endpoint, kwargs) + + def get(self, team_id): + """ + Get team by id + + Args: + team_id (string): the team's id + """ + endpoint = "/teams/{}".format(team_id) + return self.client._api_call("get", endpoint) + + def get_members(self, team_id): + """ + Get the member list for a given team. + + Args: + team_id (string): The team id. + """ + endpoint = "/teams/{}/members".format(team_id) + return self.client._api_call("get", endpoint) + + def list_projects(self, team_id, **kwargs): + """ + Get projects owned by the team. + + Args: + team_id (string): The team id. + """ + endpoint = "/teams/{}/projects".format(team_id) + return self.client._api_call("get", endpoint, kwargs) + + def add_members(self, team_id, emails): + """ + Add a list of users via their e-mail address to a given team. + + Args: + team_id (string): The team id. + emails (list): The e-mails you want to add. 
+ """ + payload = dict() + payload["batch"] = list(map(lambda email: {"email": email}, emails)) + + endpoint = "/batch/teams/{}/members".format(team_id) + return self.client._api_call("post", endpoint, payload=payload) + + def remove_members(self, team_id, emails): + """ + Remove a list of users via their e-mail address from a given team. + + Args: + team_id (string): The team id. + emails (list): The e-mails you want to add. + """ + + payload = dict() + payload["batch"] = list(map(lambda email: {"email": email}, emails)) + + endpoint = "/batch/teams/{}/members".format(team_id) + return self.client._api_call("delete", endpoint, payload=payload) diff --git a/frameioclient/services/users.py b/frameioclient/services/users.py new file mode 100644 index 00000000..f23dc213 --- /dev/null +++ b/frameioclient/services/users.py @@ -0,0 +1,17 @@ +from typing import Dict + +from ..lib.service import Service + + +class User(Service): + def get_me(self): + """ + Get the current user. + """ + return self.client._api_call("get", "/me") + + def get_accounts(self): + """ + Get a list of accounts the user has access to + """ + return self.client._api_call("get", "/accounts") diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 00000000..9f751d69 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,1284 @@ +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. + +[[package]] +name = "alabaster" +version = "0.7.13" +description = "A configurable sidebar-enabled Sphinx theme" +optional = true +python-versions = ">=3.6" +files = [ + {file = "alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3"}, + {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, +] + +[[package]] +name = "analytics-python" +version = "1.4.post1" +description = "The hassle-free way to integrate analytics into any python application." 
+optional = false +python-versions = "*" +files = [ + {file = "analytics-python-1.4.post1.tar.gz", hash = "sha256:b083e69c149c39e7ad17067f0e5c1742fbd15fdc469ade36c4d1ad5edf31ee5e"}, + {file = "analytics_python-1.4.post1-py2.py3-none-any.whl", hash = "sha256:33ab660150d0f37bb2fefc93fd19c9e7bd85e5b17db44df5e7e1139f63c14246"}, +] + +[package.dependencies] +backoff = "1.10.0" +monotonic = ">=1.5" +python-dateutil = ">2.1" +requests = ">=2.7,<3.0" +six = ">=1.5" + +[package.extras] +test = ["flake8 (==3.7.9)", "mock (==2.0.0)", "pylint (==1.9.3)"] + +[[package]] +name = "ansicon" +version = "1.89.0" +description = "Python wrapper for loading Jason Hood's ANSICON" +optional = false +python-versions = "*" +files = [ + {file = "ansicon-1.89.0-py2.py3-none-any.whl", hash = "sha256:f1def52d17f65c2c9682cf8370c03f541f410c1752d6a14029f97318e4b9dfec"}, + {file = "ansicon-1.89.0.tar.gz", hash = "sha256:e4d039def5768a47e4afec8e89e83ec3ae5a26bf00ad851f914d1240b444d2b1"}, +] + +[[package]] +name = "babel" +version = "2.14.0" +description = "Internationalization utilities" +optional = true +python-versions = ">=3.7" +files = [ + {file = "Babel-2.14.0-py3-none-any.whl", hash = "sha256:efb1a25b7118e67ce3a259bed20545c29cb68be8ad2c784c83689981b7a57287"}, + {file = "Babel-2.14.0.tar.gz", hash = "sha256:6919867db036398ba21eb5c7a0f6b28ab8cbc3ae7a73a44ebe34ae74a4e7d363"}, +] + +[package.dependencies] +pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} + +[package.extras] +dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] + +[[package]] +name = "backoff" +version = "1.10.0" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "backoff-1.10.0-py2.py3-none-any.whl", hash = "sha256:5e73e2cbe780e1915a204799dba0a01896f45f4385e636bcca7a0614d879d0cd"}, + {file = "backoff-1.10.0.tar.gz", hash = 
"sha256:b8fba021fac74055ac05eb7c7bfce4723aedde6cd0a504e5326bcb0bdd6d19a4"}, +] + +[[package]] +name = "beautifulsoup4" +version = "4.12.3" +description = "Screen-scraping library" +optional = true +python-versions = ">=3.6.0" +files = [ + {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, + {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, +] + +[package.dependencies] +soupsieve = ">1.2" + +[package.extras] +cchardet = ["cchardet"] +chardet = ["chardet"] +charset-normalizer = ["charset-normalizer"] +html5lib = ["html5lib"] +lxml = ["lxml"] + +[[package]] +name = "blessed" +version = "1.20.0" +description = "Easy, practical library for making terminal apps, by providing an elegant, well-documented interface to Colors, Keyboard input, and screen Positioning capabilities." +optional = false +python-versions = ">=2.7" +files = [ + {file = "blessed-1.20.0-py2.py3-none-any.whl", hash = "sha256:0c542922586a265e699188e52d5f5ac5ec0dd517e5a1041d90d2bbf23f906058"}, + {file = "blessed-1.20.0.tar.gz", hash = "sha256:2cdd67f8746e048f00df47a2880f4d6acbcdb399031b604e34ba8f71d5787680"}, +] + +[package.dependencies] +jinxed = {version = ">=1.1.0", markers = "platform_system == \"Windows\""} +six = ">=1.9.0" +wcwidth = ">=0.1.4" + +[[package]] +name = "bump2version" +version = "1.0.1" +description = "Version-bump your software with a single command!" +optional = false +python-versions = ">=3.5" +files = [ + {file = "bump2version-1.0.1-py2.py3-none-any.whl", hash = "sha256:37f927ea17cde7ae2d7baf832f8e80ce3777624554a653006c9144f8017fe410"}, + {file = "bump2version-1.0.1.tar.gz", hash = "sha256:762cb2bfad61f4ec8e2bdf452c7c267416f8c70dd9ecb1653fd0bbb01fa936e6"}, +] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "chardet" +version = "5.2.0" +description = "Universal encoding detector for Python 3" +optional = true +python-versions = ">=3.7" +files = [ + {file = "chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970"}, + {file = "chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = 
"charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = 
"sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + 
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = 
"charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = 
"charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = 
"charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "contentful-management" +version = "2.13.1" +description = "Contentful Management API Client" +optional = true +python-versions = "*" +files = [ + {file = "contentful_management-2.13.1.tar.gz", hash = "sha256:23718aeede4e0adee928c49142ac0828604c02f15a6ab76e765719422cf84d42"}, +] + +[package.dependencies] +python-dateutil = "*" +requests = ">=2.20.0,<3.0" + +[[package]] +name = "docutils" +version = "0.17.1" +description = "Docutils -- Python Documentation Utilities" +optional = true +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "docutils-0.17.1-py2.py3-none-any.whl", hash = "sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61"}, + {file = "docutils-0.17.1.tar.gz", hash = "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125"}, +] + +[[package]] +name = "enlighten" +version = "1.12.4" +description = "Enlighten Progress Bar" +optional = false +python-versions = "*" +files = [ + {file = "enlighten-1.12.4-py2.py3-none-any.whl", hash = "sha256:5c53c57441bc5986c1d02f2f539aead9d59a206783641953a49b8d995db6b584"}, + {file = "enlighten-1.12.4.tar.gz", hash = "sha256:75f3d92b49e0ef5e454fc1a0f39dc0ab8f6d9946cbe534db3ded3010217d5b5f"}, +] + +[package.dependencies] +blessed = ">=1.17.7" +prefixed = ">=0.3.2" + +[[package]] +name = "furl" +version = "2.1.3" +description = "URL 
manipulation made simple." +optional = false +python-versions = "*" +files = [ + {file = "furl-2.1.3-py2.py3-none-any.whl", hash = "sha256:9ab425062c4217f9802508e45feb4a83e54324273ac4b202f1850363309666c0"}, + {file = "furl-2.1.3.tar.gz", hash = "sha256:5a6188fe2666c484a12159c18be97a1977a71d632ef5bb867ef15f54af39cc4e"}, +] + +[package.dependencies] +orderedmultidict = ">=1.0.1" +six = ">=1.8.0" + +[[package]] +name = "furo" +version = "2022.9.29" +description = "A clean customisable Sphinx documentation theme." +optional = true +python-versions = ">=3.7" +files = [ + {file = "furo-2022.9.29-py3-none-any.whl", hash = "sha256:559ee17999c0f52728481dcf6b1b0cf8c9743e68c5e3a18cb45a7992747869a9"}, + {file = "furo-2022.9.29.tar.gz", hash = "sha256:d4238145629c623609c2deb5384f8d036e2a1ee2a101d64b67b4348112470dbd"}, +] + +[package.dependencies] +beautifulsoup4 = "*" +pygments = ">=2.7" +sphinx = ">=4.0,<6.0" +sphinx-basic-ng = "*" + +[[package]] +name = "html2text" +version = "2020.1.16" +description = "Turn HTML into equivalent Markdown-structured text." 
+optional = true +python-versions = ">=3.5" +files = [ + {file = "html2text-2020.1.16-py3-none-any.whl", hash = "sha256:c7c629882da0cf377d66f073329ccf34a12ed2adf0169b9285ae4e63ef54c82b"}, + {file = "html2text-2020.1.16.tar.gz", hash = "sha256:e296318e16b059ddb97f7a8a1d6a5c1d7af4544049a01e261731d2d5cc277bbb"}, +] + +[[package]] +name = "idna" +version = "3.7" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, +] + +[[package]] +name = "imagesize" +version = "1.4.1" +description = "Getting image size from png/jpeg/jpeg2000/gif file" +optional = true +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, + {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, +] + +[[package]] +name = "importlib-metadata" +version = "4.13.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "importlib_metadata-4.13.0-py3-none-any.whl", hash = "sha256:8a8a81bcf996e74fee46f0d16bd3eaa382a7eb20fd82445c3ad11f4090334116"}, + {file = "importlib_metadata-4.13.0.tar.gz", hash = "sha256:dd0173e8f150d6815e098fd354f6414b0f079af4644ddfe90c71e2fc6174346d"}, +] + +[package.dependencies] +typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] +perf = ["ipython"] +testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", 
"pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." +optional = true +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jinxed" +version = "1.2.1" +description = "Jinxed Terminal Library" +optional = false +python-versions = "*" +files = [ + {file = "jinxed-1.2.1-py2.py3-none-any.whl", hash = "sha256:37422659c4925969c66148c5e64979f553386a4226b9484d910d3094ced37d30"}, + {file = "jinxed-1.2.1.tar.gz", hash = "sha256:30c3f861b73279fea1ed928cfd4dfb1f273e16cd62c8a32acfac362da0f78f3f"}, +] + +[package.dependencies] +ansicon = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "livereload" +version = "2.6.3" +description = "Python LiveReload is an awesome tool for web developers" +optional = true +python-versions = "*" +files = [ + {file = "livereload-2.6.3-py2.py3-none-any.whl", hash = "sha256:ad4ac6f53b2d62bb6ce1a5e6e96f1f00976a32348afedcb4b6d68df2a1d346e4"}, + {file = "livereload-2.6.3.tar.gz", hash = "sha256:776f2f865e59fde56490a56bcc6773b6917366bce0c267c60ee8aaf1a0959869"}, +] + +[package.dependencies] +six = "*" +tornado = {version = "*", markers = "python_version > \"2.7\""} + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." 
+optional = true +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = 
"MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = 
"MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "monotonic" +version = "1.6" +description = "An implementation of time.monotonic() for Python 2 & < 3.3" +optional = false +python-versions = "*" +files = [ + {file = "monotonic-1.6-py2.py3-none-any.whl", hash = "sha256:68687e19a14f11f26d140dd5c86f3dba4bf5df58003000ed467e0e2a69bca96c"}, + {file = "monotonic-1.6.tar.gz", hash = 
"sha256:3a55207bcfed53ddd5c5bae174524062935efed17792e9de2ad0205ce9ad63f7"}, +] + +[[package]] +name = "munch" +version = "4.0.0" +description = "A dot-accessible dictionary (a la JavaScript objects)" +optional = true +python-versions = ">=3.6" +files = [ + {file = "munch-4.0.0-py2.py3-none-any.whl", hash = "sha256:71033c45db9fb677a0b7eb517a4ce70ae09258490e419b0e7f00d1e386ecb1b4"}, + {file = "munch-4.0.0.tar.gz", hash = "sha256:542cb151461263216a4e37c3fd9afc425feeaf38aaa3025cd2a981fadb422235"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=1.7.0", markers = "python_version < \"3.8\""} + +[package.extras] +testing = ["astroid (>=2.0)", "coverage", "pylint (>=2.3.1,<2.4.0)", "pytest"] +yaml = ["PyYAML (>=5.1.0)"] + +[[package]] +name = "orderedmultidict" +version = "1.0.1" +description = "Ordered Multivalue Dictionary" +optional = false +python-versions = "*" +files = [ + {file = "orderedmultidict-1.0.1-py2.py3-none-any.whl", hash = "sha256:43c839a17ee3cdd62234c47deca1a8508a3f2ca1d0678a3bf791c87cf84adbf3"}, + {file = "orderedmultidict-1.0.1.tar.gz", hash = "sha256:04070bbb5e87291cc9bfa51df413677faf2141c73c61d2a5f7b26bea3cd882ad"}, +] + +[package.dependencies] +six = ">=1.8.0" + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = true +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "prefixed" +version = "0.7.0" +description = "Prefixed alternative numeric library" +optional = false +python-versions = "*" +files = [ + {file = "prefixed-0.7.0-py2.py3-none-any.whl", hash = "sha256:537b0e4ff4516c4578f277a41d7104f769d6935ae9cdb0f88fed82ec7b3c0ca5"}, + {file = "prefixed-0.7.0.tar.gz", hash = 
"sha256:0b54d15e602eb8af4ac31b1db21a37ea95ce5890e0741bb0dd9ded493cefbbe9"}, +] + +[[package]] +name = "pydash" +version = "7.0.6" +description = "The kitchen sink of Python utility libraries for doing \"stuff\" in a functional way. Based on the Lo-Dash Javascript library." +optional = true +python-versions = ">=3.7" +files = [ + {file = "pydash-7.0.6-py3-none-any.whl", hash = "sha256:10e506935953fde4b0d6fe21a88e17783cd1479256ae96f285b5f89063b4efd6"}, + {file = "pydash-7.0.6.tar.gz", hash = "sha256:7d9df7e9f36f2bbb08316b609480e7c6468185473a21bdd8e65dda7915565a26"}, +] + +[package.dependencies] +typing-extensions = ">=3.10,<4.6.0 || >4.6.0" + +[package.extras] +dev = ["Sphinx", "black", "build", "coverage", "docformatter", "flake8", "flake8-black", "flake8-bugbear", "flake8-isort", "furo", "importlib-metadata (<5)", "invoke", "isort", "mypy", "pylint", "pytest", "pytest-cov", "pytest-mypy-testing", "sphinx-autodoc-typehints", "tox", "twine", "wheel"] + +[[package]] +name = "pygments" +version = "2.17.2" +description = "Pygments is a syntax highlighting package written in Python." 
+optional = true +python-versions = ">=3.7" +files = [ + {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, + {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, +] + +[package.extras] +plugins = ["importlib-metadata"] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pyparsing" +version = "3.1.1" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +optional = true +python-versions = ">=3.6.8" +files = [ + {file = "pyparsing-3.1.1-py3-none-any.whl", hash = "sha256:32c7c0b711493c72ff18a981d24f28aaf9c1fb7ed5e9667c9e84e3db623bdbfb"}, + {file = "pyparsing-3.1.1.tar.gz", hash = "sha256:ede28a1a32462f5a9705e07aea48001a08f7cf81a021585011deba701581a0db"}, +] + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-dotenv" +version = "0.19.2" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.5" +files = [ + {file = "python-dotenv-0.19.2.tar.gz", hash = "sha256:a5de49a31e953b45ff2d2fd434bbc2670e8db5273606c1e737cc6b93eff3655f"}, + {file = "python_dotenv-0.19.2-py2.py3-none-any.whl", hash = "sha256:32b2bdc1873fd3a3c346da1c6db83d0053c3c62f28f1f38516070c4c8971b1d3"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = 
"python-frontmatter" +version = "1.1.0" +description = "Parse and manage posts with YAML (or other) frontmatter" +optional = true +python-versions = "*" +files = [ + {file = "python-frontmatter-1.1.0.tar.gz", hash = "sha256:7118d2bd56af9149625745c58c9b51fb67e8d1294a0c76796dafdc72c36e5f6d"}, + {file = "python_frontmatter-1.1.0-py3-none-any.whl", hash = "sha256:335465556358d9d0e6c98bbeb69b1c969f2a4a21360587b9873bfc3b213407c1"}, +] + +[package.dependencies] +PyYAML = "*" + +[package.extras] +docs = ["sphinx"] +test = ["mypy", "pyaml", "pytest", "toml", "types-PyYAML", "types-toml"] + +[[package]] +name = "pytz" +version = "2024.1" +description = "World timezone definitions, modern and historical" +optional = true +python-versions = "*" +files = [ + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = true +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", 
hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = 
"sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = 
"sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "snowballstemmer" +version = "2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." +optional = true +python-versions = "*" +files = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] + +[[package]] +name = "soupsieve" +version = "2.4.1" +description = "A modern CSS selector implementation for Beautiful Soup." 
+optional = true +python-versions = ">=3.7" +files = [ + {file = "soupsieve-2.4.1-py3-none-any.whl", hash = "sha256:1c1bfee6819544a3447586c889157365a27e10d88cde3ad3da0cf0ddf646feb8"}, + {file = "soupsieve-2.4.1.tar.gz", hash = "sha256:89d12b2d5dfcd2c9e8c22326da9d9aa9cb3dfab0a83a024f05704076ee8d35ea"}, +] + +[[package]] +name = "sphinx" +version = "4.5.0" +description = "Python documentation generator" +optional = true +python-versions = ">=3.6" +files = [ + {file = "Sphinx-4.5.0-py3-none-any.whl", hash = "sha256:ebf612653238bcc8f4359627a9b7ce44ede6fdd75d9d30f68255c7383d3a6226"}, + {file = "Sphinx-4.5.0.tar.gz", hash = "sha256:7bf8ca9637a4ee15af412d1a1d9689fec70523a68ca9bb9127c2f3eeb344e2e6"}, +] + +[package.dependencies] +alabaster = ">=0.7,<0.8" +babel = ">=1.3" +colorama = {version = ">=0.3.5", markers = "sys_platform == \"win32\""} +docutils = ">=0.14,<0.18" +imagesize = "*" +importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} +Jinja2 = ">=2.3" +packaging = "*" +Pygments = ">=2.0" +requests = ">=2.5.0" +snowballstemmer = ">=1.1" +sphinxcontrib-applehelp = "*" +sphinxcontrib-devhelp = "*" +sphinxcontrib-htmlhelp = ">=2.0.0" +sphinxcontrib-jsmath = "*" +sphinxcontrib-qthelp = "*" +sphinxcontrib-serializinghtml = ">=1.1.5" + +[package.extras] +docs = ["sphinxcontrib-websupport"] +lint = ["docutils-stubs", "flake8 (>=3.5.0)", "isort", "mypy (>=0.931)", "types-requests", "types-typed-ast"] +test = ["cython", "html5lib", "pytest", "pytest-cov", "typed-ast"] + +[[package]] +name = "sphinx-autobuild" +version = "2021.3.14" +description = "Rebuild Sphinx documentation on changes, with live-reload in the browser." 
+optional = true +python-versions = ">=3.6" +files = [ + {file = "sphinx-autobuild-2021.3.14.tar.gz", hash = "sha256:de1ca3b66e271d2b5b5140c35034c89e47f263f2cd5db302c9217065f7443f05"}, + {file = "sphinx_autobuild-2021.3.14-py3-none-any.whl", hash = "sha256:8fe8cbfdb75db04475232f05187c776f46f6e9e04cacf1e49ce81bdac649ccac"}, +] + +[package.dependencies] +colorama = "*" +livereload = "*" +sphinx = "*" + +[package.extras] +test = ["pytest", "pytest-cov"] + +[[package]] +name = "sphinx-autodoc-typehints" +version = "1.19.1" +description = "Type hints (PEP 484) support for the Sphinx autodoc extension" +optional = true +python-versions = ">=3.7" +files = [ + {file = "sphinx_autodoc_typehints-1.19.1-py3-none-any.whl", hash = "sha256:9be46aeeb1b315eb5df1f3a7cb262149895d16c7d7dcd77b92513c3c3a1e85e6"}, + {file = "sphinx_autodoc_typehints-1.19.1.tar.gz", hash = "sha256:6c841db55e0e9be0483ff3962a2152b60e79306f4288d8c4e7e86ac84486a5ea"}, +] + +[package.dependencies] +Sphinx = ">=4.5" + +[package.extras] +testing = ["covdefaults (>=2.2)", "coverage (>=6.3)", "diff-cover (>=6.4)", "nptyping (>=2.1.2)", "pytest (>=7.1)", "pytest-cov (>=3)", "sphobjinv (>=2)", "typing-extensions (>=4.1)"] +type-comments = ["typed-ast (>=1.5.2)"] + +[[package]] +name = "sphinx-basic-ng" +version = "1.0.0b2" +description = "A modern skeleton for Sphinx themes." 
+optional = true +python-versions = ">=3.7" +files = [ + {file = "sphinx_basic_ng-1.0.0b2-py3-none-any.whl", hash = "sha256:eb09aedbabfb650607e9b4b68c9d240b90b1e1be221d6ad71d61c52e29f7932b"}, + {file = "sphinx_basic_ng-1.0.0b2.tar.gz", hash = "sha256:9ec55a47c90c8c002b5960c57492ec3021f5193cb26cebc2dc4ea226848651c9"}, +] + +[package.dependencies] +sphinx = ">=4.0" + +[package.extras] +docs = ["furo", "ipython", "myst-parser", "sphinx-copybutton", "sphinx-inline-tabs"] + +[[package]] +name = "sphinx-jekyll-builder" +version = "0.3.0" +description = "sphinx builder that outputs jekyll compatible markdown files with frontmatter" +optional = true +python-versions = "*" +files = [ + {file = "sphinx-jekyll-builder-0.3.0.tar.gz", hash = "sha256:5ecc0a1821849fc55c4b26e0efb8fb45454904c6900378ebd776f8a74d6e06f7"}, + {file = "sphinx_jekyll_builder-0.3.0-py2.py3-none-any.whl", hash = "sha256:b3b2d46ba49b7d47d8f58077c9ccce928f2cfec6d3fce7738c69f1590c72ebc5"}, +] + +[package.dependencies] +alabaster = ">=0.7.12" +Babel = ">=2.6.0" +certifi = ">=2018.11.29" +chardet = ">=3.0.4" +docutils = ">=0.14" +html2text = ">=2018.1.9" +idna = ">=3.7" +imagesize = ">=1.1.0" +Jinja2 = ">=2.10.1" +MarkupSafe = ">=1.1.0" +munch = ">=2.3.2" +packaging = ">=19.0" +pydash = ">=4.7.4" +Pygments = ">=2.3.1" +pyparsing = ">=2.3.1" +pytz = ">=2018.9" +PyYAML = ">=5.1" +requests = ">=2.21.0" +six = ">=1.12.0" +snowballstemmer = ">=1.2.1" +Sphinx = ">=1.8.3" +sphinx-markdown-builder = ">=0.5.3" +sphinxcontrib-websupport = ">=1.1.0" +typing = ">=3.6.6" +urllib3 = ">=1.24.2" + +[[package]] +name = "sphinx-markdown-builder" +version = "0.6.5" +description = "A Sphinx extension to add markdown generation support." 
+optional = true +python-versions = ">=3.7" +files = [ + {file = "sphinx-markdown-builder-0.6.5.tar.gz", hash = "sha256:dae3184cfefdfe9ee1af69ae9e6e09cf2768f51afeb81ae1b3c219dbfdb33e97"}, + {file = "sphinx_markdown_builder-0.6.5-py3-none-any.whl", hash = "sha256:59c8e841b56bbf04a2c11e1984f7258fa28a20c0257aa54ea3ae7a0013a27d4a"}, +] + +[package.dependencies] +docutils = "*" +sphinx = ">=2.2.0" +tabulate = "*" + +[package.extras] +dev = ["black", "bumpver", "coveralls", "flake8", "isort", "pip-tools", "pylint", "pytest", "pytest-cov", "sphinx-needs", "sphinxcontrib-plantuml"] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "1.0.2" +description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books" +optional = true +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"}, + {file = "sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "1.0.2" +description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." 
+optional = true +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, + {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.0.0" +description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +optional = true +python-versions = ">=3.6" +files = [ + {file = "sphinxcontrib-htmlhelp-2.0.0.tar.gz", hash = "sha256:f5f8bb2d0d629f398bf47d0d69c07bc13b65f75a81ad9e2f71a63d4b7a2f6db2"}, + {file = "sphinxcontrib_htmlhelp-2.0.0-py2.py3-none-any.whl", hash = "sha256:d412243dfb797ae3ec2b59eca0e52dac12e75a241bf0e4eb861e450d06c6ed07"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["html5lib", "pytest"] + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +description = "A sphinx extension which renders display math in HTML via JavaScript" +optional = true +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, + {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, +] + +[package.extras] +test = ["flake8", "mypy", "pytest"] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "1.0.3" +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." 
+optional = true +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, + {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-restbuilder" +version = "0.3" +description = "Sphinx extension to output reST files." +optional = true +python-versions = ">=2.7, !=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" +files = [ + {file = "sphinxcontrib-restbuilder-0.3.tar.gz", hash = "sha256:6b3ee9394b5ec5e73e6afb34d223530d0b9098cb7562f9c5e364e6d6b41410ce"}, + {file = "sphinxcontrib_restbuilder-0.3-py2.py3-none-any.whl", hash = "sha256:6ba2ddc7a87d845c075c1b2e00d541bd1c8400488e50e32c9b4169ccdd9f30cb"}, +] + +[package.dependencies] +Sphinx = ">=1.4" + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "1.1.5" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." 
+optional = true +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, + {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-websupport" +version = "1.2.4" +description = "Sphinx API for Web Apps" +optional = true +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-websupport-1.2.4.tar.gz", hash = "sha256:4edf0223a0685a7c485ae5a156b6f529ba1ee481a1417817935b20bde1956232"}, + {file = "sphinxcontrib_websupport-1.2.4-py2.py3-none-any.whl", hash = "sha256:6fc9287dfc823fe9aa432463edd6cea47fa9ebbf488d7f289b322ffcfca075c7"}, +] + +[package.dependencies] +sphinxcontrib-serializinghtml = "*" + +[package.extras] +lint = ["flake8"] +test = ["Sphinx", "pytest", "sqlalchemy", "whoosh"] + +[[package]] +name = "tabulate" +version = "0.9.0" +description = "Pretty-print tabular data" +optional = true +python-versions = ">=3.7" +files = [ + {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, + {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, +] + +[package.extras] +widechars = ["wcwidth"] + +[[package]] +name = "token-bucket" +version = "0.3.0" +description = "Very fast implementation of the token bucket algorithm." 
+optional = false +python-versions = ">=3.5" +files = [ + {file = "token_bucket-0.3.0-py2.py3-none-any.whl", hash = "sha256:6df24309e3cf5b808ae5ef714a3191ec5b54f48c34ef959e4882eef140703369"}, + {file = "token_bucket-0.3.0.tar.gz", hash = "sha256:979571c99db2ff9e651f2b2146a62b2ebadf7de6c217a8781698282976cb675f"}, +] + +[[package]] +name = "tornado" +version = "6.2" +description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." +optional = true +python-versions = ">= 3.7" +files = [ + {file = "tornado-6.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:20f638fd8cc85f3cbae3c732326e96addff0a15e22d80f049e00121651e82e72"}, + {file = "tornado-6.2-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:87dcafae3e884462f90c90ecc200defe5e580a7fbbb4365eda7c7c1eb809ebc9"}, + {file = "tornado-6.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba09ef14ca9893954244fd872798b4ccb2367c165946ce2dd7376aebdde8e3ac"}, + {file = "tornado-6.2-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8150f721c101abdef99073bf66d3903e292d851bee51910839831caba341a75"}, + {file = "tornado-6.2-cp37-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3a2f5999215a3a06a4fc218026cd84c61b8b2b40ac5296a6db1f1451ef04c1e"}, + {file = "tornado-6.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:5f8c52d219d4995388119af7ccaa0bcec289535747620116a58d830e7c25d8a8"}, + {file = "tornado-6.2-cp37-abi3-musllinux_1_1_i686.whl", hash = "sha256:6fdfabffd8dfcb6cf887428849d30cf19a3ea34c2c248461e1f7d718ad30b66b"}, + {file = "tornado-6.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:1d54d13ab8414ed44de07efecb97d4ef7c39f7438cf5e976ccd356bebb1b5fca"}, + {file = "tornado-6.2-cp37-abi3-win32.whl", hash = "sha256:5c87076709343557ef8032934ce5f637dbb552efa7b21d08e89ae7619ed0eb23"}, + {file = 
"tornado-6.2-cp37-abi3-win_amd64.whl", hash = "sha256:e5f923aa6a47e133d1cf87d60700889d7eae68988704e20c75fb2d65677a8e4b"}, + {file = "tornado-6.2.tar.gz", hash = "sha256:9b630419bde84ec666bfd7ea0a4cb2a8a651c2d5cccdbdd1972a0c859dfc3c13"}, +] + +[[package]] +name = "tqdm" +version = "4.66.2" +description = "Fast, Extensible Progress Meter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tqdm-4.66.2-py3-none-any.whl", hash = "sha256:1ee4f8a893eb9bef51c6e35730cebf234d5d0b6bd112b0271e10ed7c24a02bd9"}, + {file = "tqdm-4.66.2.tar.gz", hash = "sha256:6cd52cdf0fef0e0f543299cfc96fec90d7b8a7e88745f411ec33eb44d5ed3531"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + +[[package]] +name = "typing" +version = "3.7.4.3" +description = "Type Hints for Python" +optional = true +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "typing-3.7.4.3-py2-none-any.whl", hash = "sha256:283d868f5071ab9ad873e5e52268d611e851c870a2ba354193026f2dfb29d8b5"}, + {file = "typing-3.7.4.3.tar.gz", hash = "sha256:1187fb9c82fd670d10aa07bbb6cfcfe4bdda42d6fab8d5134f04e8c4d0b71cc9"}, +] + +[[package]] +name = "typing-extensions" +version = "4.7.1" +description = "Backported and Experimental Type Hints for Python 3.7+" +optional = false +python-versions = ">=3.7" +files = [ + {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, + {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, +] + +[[package]] +name = "urllib3" +version = "1.26.18" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"}, + {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"}, +] + +[package.extras] +brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[[package]] +name = "wcwidth" +version = "0.2.13" +description = "Measures the displayed width of unicode strings in a terminal" +optional = false +python-versions = "*" +files = [ + {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, + {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, +] + +[[package]] +name = "xxhash" +version = "3.4.1" +description = "Python binding for xxHash" +optional = false +python-versions = ">=3.7" +files = [ + {file = "xxhash-3.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:91dbfa55346ad3e18e738742236554531a621042e419b70ad8f3c1d9c7a16e7f"}, + {file = "xxhash-3.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:665a65c2a48a72068fcc4d21721510df5f51f1142541c890491afc80451636d2"}, + {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb11628470a6004dc71a09fe90c2f459ff03d611376c1debeec2d648f44cb693"}, + {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5bef2a7dc7b4f4beb45a1edbba9b9194c60a43a89598a87f1a0226d183764189"}, + {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:9c0f7b2d547d72c7eda7aa817acf8791f0146b12b9eba1d4432c531fb0352228"}, + {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00f2fdef6b41c9db3d2fc0e7f94cb3db86693e5c45d6de09625caad9a469635b"}, + {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23cfd9ca09acaf07a43e5a695143d9a21bf00f5b49b15c07d5388cadf1f9ce11"}, + {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6a9ff50a3cf88355ca4731682c168049af1ca222d1d2925ef7119c1a78e95b3b"}, + {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f1d7c69a1e9ca5faa75546fdd267f214f63f52f12692f9b3a2f6467c9e67d5e7"}, + {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:672b273040d5d5a6864a36287f3514efcd1d4b1b6a7480f294c4b1d1ee1b8de0"}, + {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4178f78d70e88f1c4a89ff1ffe9f43147185930bb962ee3979dba15f2b1cc799"}, + {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9804b9eb254d4b8cc83ab5a2002128f7d631dd427aa873c8727dba7f1f0d1c2b"}, + {file = "xxhash-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c09c49473212d9c87261d22c74370457cfff5db2ddfc7fd1e35c80c31a8c14ce"}, + {file = "xxhash-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:ebbb1616435b4a194ce3466d7247df23499475c7ed4eb2681a1fa42ff766aff6"}, + {file = "xxhash-3.4.1-cp310-cp310-win_arm64.whl", hash = "sha256:25dc66be3db54f8a2d136f695b00cfe88018e59ccff0f3b8f545869f376a8a46"}, + {file = "xxhash-3.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:58c49083801885273e262c0f5bbeac23e520564b8357fbb18fb94ff09d3d3ea5"}, + {file = "xxhash-3.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b526015a973bfbe81e804a586b703f163861da36d186627e27524f5427b0d520"}, + {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:36ad4457644c91a966f6fe137d7467636bdc51a6ce10a1d04f365c70d6a16d7e"}, + {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:248d3e83d119770f96003271fe41e049dd4ae52da2feb8f832b7a20e791d2920"}, + {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2070b6d5bbef5ee031666cf21d4953c16e92c2f8a24a94b5c240f8995ba3b1d0"}, + {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2746035f518f0410915e247877f7df43ef3372bf36cfa52cc4bc33e85242641"}, + {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a8ba6181514681c2591840d5632fcf7356ab287d4aff1c8dea20f3c78097088"}, + {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0aac5010869240e95f740de43cd6a05eae180c59edd182ad93bf12ee289484fa"}, + {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4cb11d8debab1626181633d184b2372aaa09825bde709bf927704ed72765bed1"}, + {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b29728cff2c12f3d9f1d940528ee83918d803c0567866e062683f300d1d2eff3"}, + {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:a15cbf3a9c40672523bdb6ea97ff74b443406ba0ab9bca10ceccd9546414bd84"}, + {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6e66df260fed01ed8ea790c2913271641c58481e807790d9fca8bfd5a3c13844"}, + {file = "xxhash-3.4.1-cp311-cp311-win32.whl", hash = "sha256:e867f68a8f381ea12858e6d67378c05359d3a53a888913b5f7d35fbf68939d5f"}, + {file = "xxhash-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:200a5a3ad9c7c0c02ed1484a1d838b63edcf92ff538770ea07456a3732c577f4"}, + {file = "xxhash-3.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:1d03f1c0d16d24ea032e99f61c552cb2b77d502e545187338bea461fde253583"}, + {file = "xxhash-3.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:c4bbba9b182697a52bc0c9f8ec0ba1acb914b4937cd4a877ad78a3b3eeabefb3"}, + {file = "xxhash-3.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9fd28a9da300e64e434cfc96567a8387d9a96e824a9be1452a1e7248b7763b78"}, + {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6066d88c9329ab230e18998daec53d819daeee99d003955c8db6fc4971b45ca3"}, + {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93805bc3233ad89abf51772f2ed3355097a5dc74e6080de19706fc447da99cd3"}, + {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64da57d5ed586ebb2ecdde1e997fa37c27fe32fe61a656b77fabbc58e6fbff6e"}, + {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a97322e9a7440bf3c9805cbaac090358b43f650516486746f7fa482672593df"}, + {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbe750d512982ee7d831838a5dee9e9848f3fb440e4734cca3f298228cc957a6"}, + {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fd79d4087727daf4d5b8afe594b37d611ab95dc8e29fe1a7517320794837eb7d"}, + {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:743612da4071ff9aa4d055f3f111ae5247342931dedb955268954ef7201a71ff"}, + {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:b41edaf05734092f24f48c0958b3c6cbaaa5b7e024880692078c6b1f8247e2fc"}, + {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:a90356ead70d715fe64c30cd0969072de1860e56b78adf7c69d954b43e29d9fa"}, + {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ac56eebb364e44c85e1d9e9cc5f6031d78a34f0092fea7fc80478139369a8b4a"}, + {file = "xxhash-3.4.1-cp312-cp312-win32.whl", hash = "sha256:911035345932a153c427107397c1518f8ce456f93c618dd1c5b54ebb22e73747"}, + {file = 
"xxhash-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:f31ce76489f8601cc7b8713201ce94b4bd7b7ce90ba3353dccce7e9e1fee71fa"}, + {file = "xxhash-3.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:b5beb1c6a72fdc7584102f42c4d9df232ee018ddf806e8c90906547dfb43b2da"}, + {file = "xxhash-3.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6d42b24d1496deb05dee5a24ed510b16de1d6c866c626c2beb11aebf3be278b9"}, + {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b685fab18876b14a8f94813fa2ca80cfb5ab6a85d31d5539b7cd749ce9e3624"}, + {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:419ffe34c17ae2df019a4685e8d3934d46b2e0bbe46221ab40b7e04ed9f11137"}, + {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e041ce5714f95251a88670c114b748bca3bf80cc72400e9f23e6d0d59cf2681"}, + {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc860d887c5cb2f524899fb8338e1bb3d5789f75fac179101920d9afddef284b"}, + {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:312eba88ffe0a05e332e3a6f9788b73883752be63f8588a6dc1261a3eaaaf2b2"}, + {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:e01226b6b6a1ffe4e6bd6d08cfcb3ca708b16f02eb06dd44f3c6e53285f03e4f"}, + {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9f3025a0d5d8cf406a9313cd0d5789c77433ba2004b1c75439b67678e5136537"}, + {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:6d3472fd4afef2a567d5f14411d94060099901cd8ce9788b22b8c6f13c606a93"}, + {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:43984c0a92f06cac434ad181f329a1445017c33807b7ae4f033878d860a4b0f2"}, + {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a55e0506fdb09640a82ec4f44171273eeabf6f371a4ec605633adb2837b5d9d5"}, + 
{file = "xxhash-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:faec30437919555b039a8bdbaba49c013043e8f76c999670aef146d33e05b3a0"}, + {file = "xxhash-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:c9e1b646af61f1fc7083bb7b40536be944f1ac67ef5e360bca2d73430186971a"}, + {file = "xxhash-3.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:961d948b7b1c1b6c08484bbce3d489cdf153e4122c3dfb07c2039621243d8795"}, + {file = "xxhash-3.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:719a378930504ab159f7b8e20fa2aa1896cde050011af838af7e7e3518dd82de"}, + {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74fb5cb9406ccd7c4dd917f16630d2e5e8cbbb02fc2fca4e559b2a47a64f4940"}, + {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5dab508ac39e0ab988039bc7f962c6ad021acd81fd29145962b068df4148c476"}, + {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8c59f3e46e7daf4c589e8e853d700ef6607afa037bfad32c390175da28127e8c"}, + {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cc07256eff0795e0f642df74ad096f8c5d23fe66bc138b83970b50fc7f7f6c5"}, + {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9f749999ed80f3955a4af0eb18bb43993f04939350b07b8dd2f44edc98ffee9"}, + {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7688d7c02149a90a3d46d55b341ab7ad1b4a3f767be2357e211b4e893efbaaf6"}, + {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a8b4977963926f60b0d4f830941c864bed16aa151206c01ad5c531636da5708e"}, + {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:8106d88da330f6535a58a8195aa463ef5281a9aa23b04af1848ff715c4398fb4"}, + {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4c76a77dbd169450b61c06fd2d5d436189fc8ab7c1571d39265d4822da16df22"}, + {file = 
"xxhash-3.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:11f11357c86d83e53719c592021fd524efa9cf024dc7cb1dfb57bbbd0d8713f2"}, + {file = "xxhash-3.4.1-cp38-cp38-win32.whl", hash = "sha256:0c786a6cd74e8765c6809892a0d45886e7c3dc54de4985b4a5eb8b630f3b8e3b"}, + {file = "xxhash-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:aabf37fb8fa27430d50507deeab2ee7b1bcce89910dd10657c38e71fee835594"}, + {file = "xxhash-3.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6127813abc1477f3a83529b6bbcfeddc23162cece76fa69aee8f6a8a97720562"}, + {file = "xxhash-3.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef2e194262f5db16075caea7b3f7f49392242c688412f386d3c7b07c7733a70a"}, + {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71be94265b6c6590f0018bbf73759d21a41c6bda20409782d8117e76cd0dfa8b"}, + {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10e0a619cdd1c0980e25eb04e30fe96cf8f4324758fa497080af9c21a6de573f"}, + {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fa122124d2e3bd36581dd78c0efa5f429f5220313479fb1072858188bc2d5ff1"}, + {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17032f5a4fea0a074717fe33477cb5ee723a5f428de7563e75af64bfc1b1e10"}, + {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca7783b20e3e4f3f52f093538895863f21d18598f9a48211ad757680c3bd006f"}, + {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d77d09a1113899fad5f354a1eb4f0a9afcf58cefff51082c8ad643ff890e30cf"}, + {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:21287bcdd299fdc3328cc0fbbdeaa46838a1c05391264e51ddb38a3f5b09611f"}, + {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:dfd7a6cc483e20b4ad90224aeb589e64ec0f31e5610ab9957ff4314270b2bf31"}, + {file = 
"xxhash-3.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:543c7fcbc02bbb4840ea9915134e14dc3dc15cbd5a30873a7a5bf66039db97ec"}, + {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fe0a98d990e433013f41827b62be9ab43e3cf18e08b1483fcc343bda0d691182"}, + {file = "xxhash-3.4.1-cp39-cp39-win32.whl", hash = "sha256:b9097af00ebf429cc7c0e7d2fdf28384e4e2e91008130ccda8d5ae653db71e54"}, + {file = "xxhash-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:d699b921af0dcde50ab18be76c0d832f803034d80470703700cb7df0fbec2832"}, + {file = "xxhash-3.4.1-cp39-cp39-win_arm64.whl", hash = "sha256:2be491723405e15cc099ade1280133ccfbf6322d2ef568494fb7d07d280e7eee"}, + {file = "xxhash-3.4.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:431625fad7ab5649368c4849d2b49a83dc711b1f20e1f7f04955aab86cd307bc"}, + {file = "xxhash-3.4.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc6dbd5fc3c9886a9e041848508b7fb65fd82f94cc793253990f81617b61fe49"}, + {file = "xxhash-3.4.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3ff8dbd0ec97aec842476cb8ccc3e17dd288cd6ce3c8ef38bff83d6eb927817"}, + {file = "xxhash-3.4.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef73a53fe90558a4096e3256752268a8bdc0322f4692ed928b6cd7ce06ad4fe3"}, + {file = "xxhash-3.4.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:450401f42bbd274b519d3d8dcf3c57166913381a3d2664d6609004685039f9d3"}, + {file = "xxhash-3.4.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a162840cf4de8a7cd8720ff3b4417fbc10001eefdd2d21541a8226bb5556e3bb"}, + {file = "xxhash-3.4.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b736a2a2728ba45017cb67785e03125a79d246462dfa892d023b827007412c52"}, + {file = "xxhash-3.4.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:1d0ae4c2e7698adef58710d6e7a32ff518b66b98854b1c68e70eee504ad061d8"}, + {file = "xxhash-3.4.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6322c4291c3ff174dcd104fae41500e75dad12be6f3085d119c2c8a80956c51"}, + {file = "xxhash-3.4.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:dd59ed668801c3fae282f8f4edadf6dc7784db6d18139b584b6d9677ddde1b6b"}, + {file = "xxhash-3.4.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:92693c487e39523a80474b0394645b393f0ae781d8db3474ccdcead0559ccf45"}, + {file = "xxhash-3.4.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4603a0f642a1e8d7f3ba5c4c25509aca6a9c1cc16f85091004a7028607ead663"}, + {file = "xxhash-3.4.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fa45e8cbfbadb40a920fe9ca40c34b393e0b067082d94006f7f64e70c7490a6"}, + {file = "xxhash-3.4.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:595b252943b3552de491ff51e5bb79660f84f033977f88f6ca1605846637b7c6"}, + {file = "xxhash-3.4.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:562d8b8f783c6af969806aaacf95b6c7b776929ae26c0cd941d54644ea7ef51e"}, + {file = "xxhash-3.4.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:41ddeae47cf2828335d8d991f2d2b03b0bdc89289dc64349d712ff8ce59d0647"}, + {file = "xxhash-3.4.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c44d584afdf3c4dbb3277e32321d1a7b01d6071c1992524b6543025fb8f4206f"}, + {file = "xxhash-3.4.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd7bddb3a5b86213cc3f2c61500c16945a1b80ecd572f3078ddbbe68f9dabdfb"}, + {file = "xxhash-3.4.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9ecb6c987b62437c2f99c01e97caf8d25660bf541fe79a481d05732e5236719c"}, + {file = 
"xxhash-3.4.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:696b4e18b7023527d5c50ed0626ac0520edac45a50ec7cf3fc265cd08b1f4c03"}, + {file = "xxhash-3.4.1.tar.gz", hash = "sha256:0379d6cf1ff987cd421609a264ce025e74f346e3e145dd106c0cc2e3ec3f99a9"}, +] + +[[package]] +name = "zipp" +version = "3.15.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.7" +files = [ + {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"}, + {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] + +[extras] +docs = [] + +[metadata] +lock-version = "2.0" +python-versions = "^3.7" +content-hash = "13837790ac1dd2f2458290c7f582a78ae1eada646b314b3b7fb5e667a8d1b350" diff --git a/poetry.toml b/poetry.toml new file mode 100644 index 00000000..53b35d37 --- /dev/null +++ b/poetry.toml @@ -0,0 +1,3 @@ +[virtualenvs] +create = true +in-project = true diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..f16ff690 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,67 @@ +[tool.poetry] +name = "frameioclient" +version = "2.0.1a5" +description='Client library for the Frame.io API' +readme = "README.md" +license='MIT' +homepage = "https://github.com/Frameio/python-frameio-client" +authors = ["Frame.io DevRel "] + +classifiers = [ + 'Development Status :: 5 - Production/Stable', + 'Intended Audience :: Developers', + 'Topic :: Multimedia :: Video', + 'Topic :: Software Development :: Libraries', + 'License 
:: OSI Approved :: MIT License', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', + 'Programming Language :: Python :: 3.11' +] + +[tool.poetry.dependencies] +python = "^3.7" +analytics-python = "^1.4.0" +enlighten = "^1.10.2" +importlib-metadata = "^4.11.3" +requests = "^2.27.1" +token-bucket = "^0.3.0" +urllib3 = "^1.26.9" +xxhash = "^3.0.0" +furl = "^2.1.3" +tqdm = "^4.66.2" + +[tool.poetry.dev-dependencies] +bump2version = "^1.0.1" + +# Optional dependencies +Sphinx = { version = "^4.4.0", optional = true } +sphinx-jekyll-builder = { version = "^0.3.0", optional = true } +sphinxcontrib-restbuilder = { version = "^0.3", optional = true } +sphinx-autobuild = { version = "^2021.3.14", optional = true } +contentful_management = { version = "^2.11.0", optional = true } +python-frontmatter = { version = "^1.0.0", optional = true } +sphinx-autodoc-typehints = { version = "^1.17.0", optional = true } +furo = { version = "^2022.3.4", optional = true } +python-dotenv = "^0.19.2" + +[tool.poetry.extras] +docs = [ + "sphinx", + "sphinx-jekyll-builder", + "sphinxcontrib-restbuilder", + "sphinx-autobuild", + "contentful_management", + "python-frontmatter", + "sphinx-autodoc-typehints", + "furo" +] + +[tool.poetry.scripts] +fiocli = 'frameioclient.fiocli:main' + +[build-system] +requires = ["setuptools", "wheel"] +build-backend = "setuptools.build_meta:__legacy__" \ No newline at end of file diff --git a/scripts/__init__.py b/scripts/__init__.py new file mode 100644 index 00000000..8b0efa25 --- /dev/null +++ b/scripts/__init__.py @@ -0,0 +1 @@ +from .benchmark import * \ No newline at end of file diff --git a/scripts/benchmark/__init__.py b/scripts/benchmark/__init__.py new file mode 100644 index 00000000..738214bd --- /dev/null +++ b/scripts/benchmark/__init__.py @@ -0,0 +1 @@ +from .utils import timefunc \ No 
newline at end of file diff --git a/scripts/benchmark/download.py b/scripts/benchmark/download.py new file mode 100644 index 00000000..a411e94b --- /dev/null +++ b/scripts/benchmark/download.py @@ -0,0 +1,77 @@ +import os +import sys + +from utils import timefunc +import frameioclient + + +def download( + asset_id: str = "", + destination: str = "downloads", + clean_up: bool = True, + size: str = "small", +): + token = os.getenv("FRAMEIO_TOKEN") + client = frameioclient.FrameioClient(token) + asset_info = client.assets.get(asset_id) + download_info = client.assets.download( + asset_info, destination, multi_part=True, replace=True + ) + + if clean_up == True: + os.remove(download_info["destination"]) + + return download_info + + +def test_s3(): + asset_list = [] + stats = [] + for asset in asset_list: + report = download(asset_id=asset) + stats.append(report) + + return stats + + +def test_cloudfront(): + asset_list = [ + "811baf7a-3248-4c7c-9d94-cc1c6c496a76", + "35f8ac33-a710-440e-8dcc-f98cfd90e0e5", + "e981f087-edbb-448d-baad-c8363b78f5ae", + ] + stats = [] + for asset in asset_list: + report = download(asset_id=asset) + stats.append(report) + + return stats + + +def build_metric(s3_stats, cf_stats, baseline): + # Compare S3 against the baseline after calculating the average of the runs + # Compare CF against the baseline after calculating the average of the runs + # Compare S3 against CF and produce a number in Mbit/s {:.2j}? 
+ # Report the asset_id as well + # Report whether something was a HIT or a MISS in cache + # Report which CDN we hit + print("Thing") + pass + + +def run_benchmark(): + s3_stats = test_s3() + cf_stats = test_cloudfront() + # build_metrics(s3_stats, cf_stats, NetworkBandwidth) + + # ComparisonTest(self.user_id, transfer_stats, self.request_logs) + + +if __name__ == "__main__": + # Old Method: + # timefunc(benchmark_download, asset_id='811baf7a-3248-4c7c-9d94-cc1c6c496a76', destination='downloads', iterations=3) # large + # timefunc(benchmark_download, asset_id='35f8ac33-a710-440e-8dcc-f98cfd90e0e5', destination='downloads', iterations=1) # medium + # timefunc(benchmark_download, asset_id='e981f087-edbb-448d-baad-c8363b78f5ae', destination='downloads', iterations=5) # small + + # New method: + run_benchmark() diff --git a/scripts/benchmark/upload.py b/scripts/benchmark/upload.py new file mode 100644 index 00000000..909b8830 --- /dev/null +++ b/scripts/benchmark/upload.py @@ -0,0 +1,16 @@ +import os +import sys + +from utils import timefunc +from frameioclient import FrameioClient + + +def benchmark_upload(source_file='', remote_destination=''): + token = os.getenv("FRAMEIO_TOKEN") + client = FrameioClient(token) + client.assets.upload(remote_destination, source_file) + + return True + +if __name__ == "__main__": + timefunc(benchmark_upload, source_file='', remote_destination='dd8526ee-2c7d-4b48-9bf7-b847664666bb', iterations=1) # medium diff --git a/scripts/benchmark/utils.py b/scripts/benchmark/utils.py new file mode 100644 index 00000000..5c2eae8d --- /dev/null +++ b/scripts/benchmark/utils.py @@ -0,0 +1,25 @@ +import sys + +from timeit import default_timer as timer + + +def timefunc(func, *args, **kwargs): + """Time a function. 
+ + args: + iterations=3 + + Usage example: + timeit(myfunc, 1, b=2) + """ + try: + iterations = kwargs.pop("iterations") + except KeyError: + iterations = 3 + elapsed = sys.maxsize + for _ in range(iterations): + start = timer() + result = func(*args, **kwargs) + elapsed = min(timer() - start, elapsed) + print(("Best of {} {}(): {:.9f}".format(iterations, func.__name__, elapsed))) + return result diff --git a/setup.py b/setup.py index 70acd024..8187f56d 100644 --- a/setup.py +++ b/setup.py @@ -4,7 +4,7 @@ from setuptools.command.install import install -version='1.1.0' +version='2.0.0' with open("README.md", "r") as f: long_description = f.read() @@ -17,25 +17,32 @@ def run(self): tag = os.getenv('CIRCLE_TAG') if tag != version: - info = "Git tag: {0} does not match the version of this app: {1}".format( - tag, version - ) + info = f"Git tag: {tag} does not match the version of this app: {version}" sys.exit(info) setuptools.setup( name='frameioclient', version=version, - python_requires='>=2.7.16, <4', + python_requires='>=3.6.5, <4', install_requires=[ + 'analytics-python', + 'enlighten', + 'importlib-metadata ~= 1.0 ; python_version < "3.8"', 'requests', + 'token-bucket', 'urllib3', 'xxhash', - 'importlib-metadata ~= 1.0 ; python_version < "3.8"', - 'futures; python_version == "2.7"' ], extras_require={ 'dev': [ 'bump2version', + 'sphinx', + 'sphinx-jekyll-builder' + ] + }, + entry_points ={ + 'console_scripts': [ + 'fiocli = frameioclient.fiocli:main' ] }, classifiers=[ @@ -44,10 +51,6 @@ def run(self): 'Topic :: Multimedia :: Video', 'Topic :: Software Development :: Libraries', 'License :: OSI Approved :: MIT License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', diff --git a/tests/integration.py b/tests/integration.py 
index 42f7be82..80ecbf73 100644 --- a/tests/integration.py +++ b/tests/integration.py @@ -1,341 +1,14 @@ -import os import sys -import json -import time -import socket -import platform -import mimetypes -import shutil -import requests +# Send integration test to py2 or py3 based on stuff -from math import ceil -from pprint import pprint, pformat -from datetime import datetime -from frameioclient import FrameioClient, Utils, KB, MB - -token = os.getenv("FRAMEIO_TOKEN") # Your Frame.io token -project_id = os.getenv("PROJECT_ID") # Project you want to upload files back into -download_asset_id = os.getenv("DOWNLOAD_FOLDER_ID") # Source folder on Frame.io (to then verify against) -environment = os.getenv("ENVIRONMENT", default="PRODUCTION") -slack_webhook_url = os.getenv("SLACK_WEBHOOK_URL") -ci_job_name = os.getenv("CIRCLE_JOB", default=None) - -retries = 0 - -# Initialize the client -def init_client(): - if len(token) < 5: - print("Bad token, exiting test.") - sys.exit(1) - - if environment == "PRODUCTION": - client = FrameioClient(token) - print("Client connection initialized.") - - else: - client = FrameioClient(token, host='https://api.dev.frame.io') - print("Client connection initialized.") - - return client - -# Verify local and source -def verify_local(client, dl_children): - # Compare remote filenames and hashes - global dl_items - dl_items = dict() - - # Iterate over local directory and get filenames and hashes - dled_files = os.listdir('downloads') - for count, fn in enumerate(dled_files, start=1): - print("{}/{} Generating hash for: {}".format(count, len(dled_files), fn)) - dl_file_path = os.path.join(os.path.abspath(os.path.curdir), 'downloads', fn) - print("Path to downloaded file for hashing: {}".format(dl_file_path)) - xxhash = Utils.calculate_hash(dl_file_path) - xxhash_name = "{}_{}".format(fn, 'xxHash') - dl_items[xxhash_name] = xxhash - - print("QCing Downloaded Files...") - - print("Original Items Check: \n") - og_items = 
flatten_asset_children(dl_children) - pprint(og_items) - - print("Downloaded Items Check: \n") - pprint(dl_items) - - pass_fail = Utils.compare_items(og_items, dl_items) - - # If verification fails here, try downloading again. - if pass_fail == False: - print("Mismatch between original and downloaded files, re-downloading...") - test_download(client, override=True) - else: - return True - -# Test download functionality -def test_download(client, override=False): - print("Testing download function...") - if override: - # Clearing download directory - shutil.rmtree('./downloads') - - if os.path.isdir('downloads'): - print("Local downloads folder detected...") - asset_list = client.assets.get_children( - download_asset_id, - page=1, - page_size=40, - include="children" - ) - - verify_local(client, asset_list) - return True - - os.mkdir('downloads') - - asset_list = client.assets.get_children( - download_asset_id, - page=1, - page_size=40, - include="children" - ) - - print("Downloading {} files.".format(len(asset_list))) - for count, asset in enumerate(asset_list, start=1): - start_time = time.time() - print("{}/{} Beginning to download: {}".format(count, len(asset_list), asset['name'])) - - client.assets.download(asset, 'downloads', multi_part=True, concurrency=20) - - download_time = time.time() - start_time - download_speed = Utils.format_bytes(ceil(asset['filesize']/(download_time))) - - print("{}/{} Download completed in {:.2f}s @ {}".format((count), len(asset_list), download_time, download_speed)) - - print("Done downloading files") - - # Verify downloads - if verify_local(client, asset_list): - print("Download verification passed") - - return True - -# Test upload functionality -def test_upload(client): - print("Beginning upload test") - # Create new parent asset - project_info = client.projects.get(project_id) - root_asset_id = project_info['root_asset_id'] - - print("Creating new folder to upload to") - new_folder = client.assets.create( - 
parent_asset_id=root_asset_id, - name="{}_{}_Py{}_{}".format(socket.gethostname(), platform.system(), platform.python_version(), datetime.now().strftime("%B-%d-%Y")), - type="folder", - ) - - new_parent_id = new_folder['id'] - - print("Folder created, id: {}, name: {}".format(new_parent_id, new_folder['name'])) - - # Upload all the files we downloaded earlier - dled_files = os.listdir('downloads') - - for count, fn in enumerate(dled_files, start=1): - start_time = time.time() - ul_abs_path = os.path.join(os.curdir, 'downloads', fn) - filesize = os.path.getsize(ul_abs_path) - filename = os.path.basename(ul_abs_path) - - print("{}/{} Beginning to upload: {}".format(count, len(dled_files), fn)) - - client.assets.upload(new_parent_id, ul_abs_path) - - upload_time = time.time() - start_time - upload_speed = Utils.format_bytes(ceil(filesize/(upload_time))) - - print("{}/{} Upload completed in {:.2f}s @ {}".format((count), len(dled_files), upload_time, upload_speed)) - - print("Sleeping for 10 seconds to allow upload and media analysis to finish...") - time.sleep(10) - - print("Continuing...") - - return new_parent_id - -# Flatten asset children and pull out important info for comparison -def flatten_asset_children(asset_children): - flat_dict = dict() - - for asset in asset_children: - try: - xxhash_name = "{}_{}".format(asset['name'], 'xxHash') - xxhash_checksum = asset['checksums']['xx_hash'] - - if sys.version_info.major < 3: # if Python 2 convert the field - xxhash_checksum = str(xxhash_checksum.encode('utf-8')) - - flat_dict[xxhash_name] = xxhash_checksum - - except TypeError as e: - print(e) - xxhash_name = "{}_{}".format(asset['name'], 'xxHash') - flat_dict[xxhash_name] = "missing" - - continue - - return flat_dict - -def check_for_checksums(client, upload_folder_id): - # Get asset children for upload folder - asset_children = client.assets.get_children( - upload_folder_id, - page=1, - page_size=40, - include="children" - ) - - global retries - print("Checking for 
checksums attempt #{}".format(retries+1)) - - if retries < 20: - for asset in asset_children: - try: - asset['checksums']['xx_hash'] - print("Success...") - print("Asset ID: {}".format(asset['id'])) - print("Asset Name: {}".format(asset['name'])) - print("Checksum dict: {}".format(asset['checksums'])) - except TypeError as e: - # print(e) - print("Failure...") - print("Checksum dict: {}".format(asset['checksums'])) - print("Asset ID: {}".format(asset['id'])) - print("Asset Name: {}".format(asset['name'])) - print("Checksums not yet calculated, sleeping for 15 seconds.") - time.sleep(15) - retries += 1 - check_for_checksums(client, upload_folder_id) - return True - else: - return False - -def check_upload_completion(client, download_folder_id, upload_folder_id): - # Do a comparison against filenames and filesizes here to make sure they match - - print("Beginning upload comparison check") - - # Get asset children for download folder - dl_asset_children = client.assets.get_children( - download_folder_id, - page=1, - page_size=40, - include="children" - ) - - print("Got asset children for original download folder") - - print("Making sure checksums are calculated before verifying") - check_for_checksums(client, upload_folder_id) - - # Get asset children for upload folder - ul_asset_children = client.assets.get_children( - upload_folder_id, - page=1, - page_size=40, - include="children" - ) - - print("Got asset children for uploaded folder") - - global dl_items # Get the global dl_items - - # if len(dl_items.items) < 1: - - og_items = flatten_asset_children(dl_asset_children) - ul_items = flatten_asset_children(ul_asset_children) - - print("'Completed' uploads: {}/{}".format(int(len(ul_items)), int(len(og_items)))) - print("Percentage uploads completed but not verified: {:.2%}".format(len(ul_items)/len(og_items))) - - print("Running verification...") - - print("OG Items Check:") - pprint(og_items) - - print("DL Items Check:") - pprint(dl_items) - - print("UL Items 
Check:") - pprint(ul_items) - - pass_fail = Utils.compare_items(og_items, ul_items) - - print("Verification complete for {}/{} uploaded assets.".format(int(len(ul_items)), int(len(og_items)))) - - if ci_job_name is not None: - print("CircleCI Job Name: {}".format(ci_job_name)) - if ci_job_name == "upload_test_job": - send_to_slack(format_slack_message(pass_fail, og_items, dl_items, ul_items)) - - if pass_fail == True: - print("Integration test passed! :)") - else: - print("Integration test failed! :(") - sys.exit(1) - - return True - -def format_slack_message(pass_fail, og_items, dl_items, ul_items): - # Format slack message for sending - message = "Test Pass/Fail: *{}*\n\n*Original assets:* \n{}\n*Downloaded assets:* \n {}\n*Uploaded assets:* \n {}".format(pass_fail, pformat(og_items), pformat(dl_items), pformat(ul_items)) - print(message) - - return message - -def send_to_slack(message): - # Send Slack message to provided - if len(slack_webhook_url) < 2: - print("No Slack webhook ENV var provided, not sending a Slack message...") - - data = { - 'text': message, - 'username': 'Upload Integration Test', - 'icon_emoji': ':robot_face:' - } +if __name__ == "__main__": + version_major = sys.version_info[0] + version_minor = sys.version_info[1] - response = requests.post(slack_webhook_url, data=json.dumps( - data), headers={'Content-Type': 'application/json'}) - - print('Response: ' + str(response.text)) - print('Response code: ' + str(response.status_code)) + if version_major > 3 and version_minor > 6: + import py3_integration + py3_integration.run_test() - if response.status_code == 200: - return True else: - return False - -def clean_up(client, asset_to_delete): - print("Removing files from test...") - - try: - client._api_call('delete', '/assets/{}'.format(asset_to_delete)) - print("Managed to cleanup!") - except Exception as e: - print(e) - - return True - -def run_test(): - print("Beginning Integration test...") - - client = init_client() - test_download(client) - 
# ---- tests/py2_integration.py : module setup and transfer helpers ----
"""Integration suite run under Python 2 interpreters.

Downloads a known folder of assets from Frame.io, re-uploads them into a
fresh folder, then verifies xxHash checksums match end to end.  All
configuration comes from environment variables.
"""
import os
import sys
import json
import time
import shutil
import socket
import requests
import platform

from math import ceil
from pprint import pprint, pformat
from datetime import datetime
from frameioclient import FrameioClient, Utils, KB, MB
from frameioclient.lib.utils import FormatTypes

token = os.getenv("FRAMEIO_TOKEN")  # Your Frame.io token
project_id = os.getenv("PROJECT_ID")  # Project you want to upload files back into
download_asset_id = os.getenv("DOWNLOAD_FOLDER_ID")  # Source folder on Frame.io (to then verify against)
environment = os.getenv("ENVIRONMENT", default="PRODUCTION")
slack_webhook_url = os.getenv("SLACK_WEBHOOK_URL")
ci_job_name = os.getenv("CIRCLE_JOB", default=None)

download_dir = 'downloads'

# Shared retry counter for check_for_checksums()'s recursive polling.
retries = 0


# Initialize the client
def init_client():
    """Build a FrameioClient for prod or dev based on ENVIRONMENT.

    Exits the process when the token env var looks invalid.
    """
    if len(token) < 5:
        print("Bad token, exiting test.")
        sys.exit(1)

    if environment == "PRODUCTION":
        client = FrameioClient(token, threads=10)
        print("Client connection initialized.")
    else:
        client = FrameioClient(token, host='https://api.dev.frame.io', threads=10)
        print("Client connection initialized.")

    return client


# Verify local and source
def verify_local(client, dl_children):
    """Hash every file in download_dir and compare against remote checksums.

    Populates the module-global `dl_items` ("<name>_xxHash" -> hash) so
    check_upload_completion() can reuse it later.  On mismatch the download
    is retried via test_download(override=True).
    NOTE(review): a persistent mismatch recurses between verify_local and
    test_download without a bound — TODO confirm this is acceptable for CI.
    """
    global dl_items
    dl_items = dict()

    # Iterate over local directory and get filenames and hashes
    dled_files = os.listdir(download_dir)
    for count, fn in enumerate(dled_files, start=1):
        print("{}/{} Generating hash for: {}".format(count, len(dled_files), fn))
        dl_file_path = os.path.join(os.path.abspath(os.path.curdir), download_dir, fn)
        print("Path to downloaded file for hashing: {}".format(dl_file_path))
        xxhash = Utils.calculate_hash(dl_file_path)
        xxhash_name = "{}_{}".format(fn, 'xxHash')
        dl_items[xxhash_name] = xxhash

    print("QCing Downloaded Files...")

    print("Original Items Check: \n")
    og_items = flatten_asset_children(dl_children)
    pprint(og_items)

    print("Downloaded Items Check: \n")
    pprint(dl_items)

    pass_fail = Utils.compare_items(og_items, dl_items)

    # If verification fails here, try downloading again.
    if pass_fail == False:
        print("Mismatch between original and downloaded files, re-downloading...")
        test_download(client, override=True)
    else:
        return True


# Test download functionality
def test_download(client, override=False):
    """Download every child of download_asset_id into download_dir and verify.

    With override=True the local folder is wiped first; otherwise an
    existing folder is verified in place without re-downloading.
    """
    print("Testing download function...")
    if override:
        # Clearing download directory
        shutil.rmtree(download_dir)

    if os.path.isdir(download_dir):
        print("Local downloads folder detected...")
        asset_list = client.assets.get_children(
            download_asset_id,
            page=1,
            page_size=40,
            include="children"
        )

        verify_local(client, asset_list)
        return True

    os.mkdir(download_dir)

    asset_list = client.assets.get_children(
        download_asset_id,
        page=1,
        page_size=40,
        include="children"
    )

    print("Downloading {} files.".format(len(asset_list)))
    for count, asset in enumerate(asset_list, start=1):
        start_time = time.time()
        print("{}/{} Beginning to download: {}".format(count, len(asset_list), asset['name']))

        client.assets.download(asset, download_dir, multi_part=True)

        download_time = time.time() - start_time
        download_speed = Utils.format_value(ceil(asset['filesize'] / (download_time)), type=FormatTypes.SPEED)

        print("{}/{} Download completed in {:.2f}s @ {}".format((count), len(asset_list), download_time, download_speed))

    print("Done downloading files")

    # Verify downloads
    if verify_local(client, asset_list):
        print("Download verification passed")

    return True


# Test upload functionality
def test_upload(client):
    """Create a dated folder in the project and upload each local file into it.

    Returns the new folder's asset id for later verification.
    """
    print("Beginning upload test")
    # Create new parent asset
    project_info = client.projects.get(project_id)
    root_asset_id = project_info['root_asset_id']

    print("Creating new folder to upload to")
    new_folder = client.assets.create(
        parent_asset_id=root_asset_id,
        name="{}_{}_Py{}_{}".format(socket.gethostname(), platform.system(), platform.python_version(), datetime.now().strftime("%B-%d-%Y")),
        type="folder",
    )

    new_parent_id = new_folder['id']

    print("Folder created, id: {}, name: {}".format(new_parent_id, new_folder['name']))

    # Upload all the files we downloaded earlier
    dled_files = os.listdir(download_dir)

    for count, fn in enumerate(dled_files, start=1):
        start_time = time.time()
        ul_abs_path = os.path.join(os.curdir, download_dir, fn)
        filesize = os.path.getsize(ul_abs_path)

        print("{}/{} Beginning to upload: {}".format(count, len(dled_files), fn))

        client.assets.upload(new_parent_id, ul_abs_path)

        upload_time = time.time() - start_time
        # FIX: pass type=FormatTypes.SPEED so the printed rate matches the
        # download path and the py3 suite (was formatted as a plain value).
        upload_speed = Utils.format_value(ceil(filesize / (upload_time)), type=FormatTypes.SPEED)

        print("{}/{} Upload completed in {:.2f}s @ {}".format((count), len(dled_files), upload_time, upload_speed))

    print("Sleeping for 10 seconds to allow upload and media analysis to finish...")
    time.sleep(10)

    print("Continuing...")

    return new_parent_id


# Flatten asset children and pull out important info for comparison
def flatten_asset_children(asset_children):
    """Map each asset to "<name>_xxHash" -> checksum ("missing" when absent)."""
    flat_dict = dict()

    for asset in asset_children:
        try:
            xxhash_name = "{}_{}".format(asset['name'], 'xxHash')
            xxhash_checksum = asset['checksums']['xx_hash']

            if sys.version_info.major < 3:  # if Python 2 convert the field
                xxhash_checksum = str(xxhash_checksum.encode('utf-8'))

            flat_dict[xxhash_name] = xxhash_checksum

        except TypeError as e:
            # asset['checksums'] is None until Frame.io finishes analysis,
            # so subscripting it raises TypeError.
            print(e)
            xxhash_name = "{}_{}".format(asset['name'], 'xxHash')
            flat_dict[xxhash_name] = "missing"

            continue

    return flat_dict
def check_for_checksums(client, upload_folder_id):
    """Poll the uploaded folder until every asset has an xx_hash checksum.

    Re-fetches the folder listing and retries (up to 20 attempts, 15s
    apart) whenever any asset's checksum dict is still missing.  Returns
    True when all checksums are present, False when retries are exhausted.
    """
    # Get asset children for upload folder
    asset_children = client.assets.get_children(
        upload_folder_id,
        page=1,
        page_size=40,
        include="children"
    )

    global retries
    print("Checking for checksums attempt #{}".format(retries + 1))

    if retries < 20:
        for asset in asset_children:
            try:
                asset['checksums']['xx_hash']
                print("Success...")
                print("Asset ID: {}".format(asset['id']))
                print("Asset Name: {}".format(asset['name']))
                print("Checksum dict: {}".format(asset['checksums']))
            except TypeError:
                # checksums is still None — media analysis hasn't finished.
                print("Failure...")
                print("Checksum dict: {}".format(asset['checksums']))
                print("Asset ID: {}".format(asset['id']))
                print("Asset Name: {}".format(asset['name']))
                print("Checksums not yet calculated, sleeping for 15 seconds.")
                time.sleep(15)
                retries += 1
                # FIX: propagate the retry's outcome.  The original ignored
                # the recursive result and fell through to `return True`,
                # reporting success even after all 20 retries failed.  The
                # recursive call re-fetches and re-checks every asset.
                return check_for_checksums(client, upload_folder_id)
        return True
    else:
        return False


def check_upload_completion(client, download_folder_id, upload_folder_id):
    """Compare checksums of originally-downloaded vs re-uploaded assets.

    Exits the process with status 1 when verification fails; reports the
    result to Slack when running as the CI "upload_test_job".
    """
    # Do a comparison against filenames and filesizes here to make sure they match
    print("Beginning upload comparison check")

    # Get asset children for download folder
    dl_asset_children = client.assets.get_children(
        download_folder_id,
        page=1,
        page_size=40,
        include="children"
    )

    print("Got asset children for original download folder")

    print("Making sure checksums are calculated before verifying")
    check_for_checksums(client, upload_folder_id)

    # Get asset children for upload folder
    ul_asset_children = client.assets.get_children(
        upload_folder_id,
        page=1,
        page_size=40,
        include="children"
    )

    print("Got asset children for uploaded folder")

    global dl_items  # Get the global dl_items populated by verify_local()

    og_items = flatten_asset_children(dl_asset_children)
    ul_items = flatten_asset_children(ul_asset_children)

    print("'Completed' uploads: {}/{}".format(int(len(ul_items)), int(len(og_items))))
    # FIX: force true division — under Python 2, int/int truncates, so the
    # percentage below could only ever print 0.00% or 100.00%.
    print("Percentage uploads completed but not verified: {:.2%}".format(float(len(ul_items)) / len(og_items)))

    print("Running verification...")

    print("OG Items Check:")
    pprint(og_items)

    print("DL Items Check:")
    pprint(dl_items)

    print("UL Items Check:")
    pprint(ul_items)

    pass_fail = Utils.compare_items(og_items, ul_items)

    print("Verification complete for {}/{} uploaded assets.".format(int(len(ul_items)), int(len(og_items))))

    if ci_job_name is not None:
        print("CircleCI Job Name: {}".format(ci_job_name))
        if ci_job_name == "upload_test_job":
            send_to_slack(format_slack_message(pass_fail, og_items, dl_items, ul_items))

    if pass_fail == True:
        print("Integration test passed! :)")
    else:
        print("Integration test failed! :(")
        sys.exit(1)

    return True


def format_slack_message(pass_fail, og_items, dl_items, ul_items):
    """Build (and echo) the Slack summary of a verification run."""
    # Format slack message for sending
    message = "Test Pass/Fail: *{}*\n\n*Original assets:* \n{}\n*Downloaded assets:* \n {}\n*Uploaded assets:* \n {}".format(pass_fail, pformat(og_items), pformat(dl_items), pformat(ul_items))
    print(message)

    return message


def send_to_slack(message):
    """POST `message` to the configured Slack webhook; True on HTTP 200."""
    # FIX: skip the POST entirely when no webhook is configured.  The
    # original only printed a warning and then posted anyway, which raised
    # TypeError when SLACK_WEBHOOK_URL was unset (None).
    if not slack_webhook_url or len(slack_webhook_url) < 2:
        print("No Slack webhook ENV var provided, not sending a Slack message...")
        return False

    data = {
        'text': message,
        'username': 'Upload Integration Test',
        'icon_emoji': ':robot_face:'
    }

    response = requests.post(slack_webhook_url, data=json.dumps(
        data), headers={'Content-Type': 'application/json'})

    print('Response: ' + str(response.text))
    print('Response code: ' + str(response.status_code))

    if response.status_code == 200:
        return True
    else:
        return False


def clean_up(client, asset_to_delete):
    """Best-effort deletion of the test upload folder; never raises."""
    print("Removing files from test...")

    try:
        client._api_call('delete', '/assets/{}'.format(asset_to_delete))
        print("Managed to cleanup!")
    except Exception as e:
        # Best-effort: log and carry on so cleanup never fails the run.
        print(e)

    return True
def run_test():
    """Drive the full py2 integration flow: download, upload, verify."""
    print("Beginning Integration test...")

    client = init_client()
    test_download(client)
    upload_folder_id = test_upload(client)
    check_upload_completion(client, download_asset_id, upload_folder_id)
    # clean_up(client, upload_folder_id)

    print("Test complete, exiting...")


if __name__ == "__main__":
    run_test()


# ---- tests/py3_integration.py : module setup and download helpers ----
import os
import sys
import json
import time
import shutil
import socket
import requests
import platform

from math import ceil
from pprint import pprint, pformat
from datetime import datetime
from frameioclient import FrameioClient, Utils, KB, MB
from frameioclient.lib.utils import FormatTypes

token = os.getenv("FRAMEIO_TOKEN")  # Your Frame.io token
project_id = os.getenv("PROJECT_ID")  # Project you want to upload files back into
download_asset_id = os.getenv("DOWNLOAD_FOLDER_ID")  # Source folder on Frame.io (to then verify against)
environment = os.getenv("ENVIRONMENT", default="PRODUCTION")
slack_webhook_url = os.getenv("SLACK_WEBHOOK_URL")
ci_job_name = os.getenv("CIRCLE_JOB", default=None)

download_dir = 'downloads'

retries = 0


def init_client() -> FrameioClient:
    """Build a FrameioClient against prod or dev depending on ENVIRONMENT."""
    if len(token) < 5:
        print("Bad token, exiting test.")
        sys.exit(1)

    if environment == "PRODUCTION":
        client = FrameioClient(token, threads=10)
    else:
        client = FrameioClient(token, host='https://api.dev.frame.io', threads=10)
    print("Client connection initialized.")

    return client


def verify_local(client: FrameioClient, dl_children):
    """Hash the local downloads and compare them to the remote children.

    Fills the module-global `dl_items` so later verification steps can
    reuse it; re-downloads everything once a mismatch is detected.
    """
    global dl_items
    dl_items = dict()

    # Hash each downloaded file, keyed as "<name>_xxHash".
    local_files = os.listdir(download_dir)
    total = len(local_files)
    for idx, name in enumerate(local_files, start=1):
        print("{}/{} Generating hash for: {}".format(idx, total, name))
        local_path = os.path.join(os.path.abspath(os.path.curdir), download_dir, name)
        print("Path to downloaded file for hashing: {}".format(local_path))
        dl_items["{}_{}".format(name, 'xxHash')] = Utils.calculate_hash(local_path)

    print("QCing Downloaded Files...")

    print("Original Items Check: \n")
    og_items = flatten_asset_children(dl_children)
    pprint(og_items)

    print("Downloaded Items Check: \n")
    pprint(dl_items)

    matched = Utils.compare_items(og_items, dl_items)

    # A failed comparison triggers one more download pass.
    if matched == False:
        print("Mismatch between original and downloaded files, re-downloading...")
        test_download(client, override=True)
    else:
        return True


def test_download(client: FrameioClient, override=False):
    """Download the source folder's children and verify the local copies."""
    print("Testing download function...")
    if override:
        # Start from a clean slate.
        shutil.rmtree(download_dir)

    def fetch_children():
        # One page of up to 40 children from the source folder.
        return client.assets.get_children(
            download_asset_id,
            page=1,
            page_size=40,
            include="children"
        )

    if os.path.isdir(download_dir):
        print("Local downloads folder detected...")
        verify_local(client, fetch_children())
        return True

    os.mkdir(download_dir)

    asset_list = fetch_children()
    total = len(asset_list)

    print("Downloading {} files.".format(total))
    for idx, asset in enumerate(asset_list, start=1):
        started = time.time()
        print("{}/{} Beginning to download: {}".format(idx, total, asset['name']))

        client.assets.download(asset, download_dir, multi_part=True)

        elapsed = time.time() - started
        rate = Utils.format_value(ceil(asset['filesize'] / elapsed), type=FormatTypes.SPEED)

        print("{}/{} Download completed in {:.2f}s @ {}".format(idx, total, elapsed, rate))

    print("Done downloading files")

    # Verify downloads
    if verify_local(client, asset_list):
        print("Download verification passed")

    return True
Utils.format_value(ceil(asset['filesize']/(download_time)), type=FormatTypes.SPEED) + + print("{}/{} Download completed in {:.2f}s @ {}".format((count), len(asset_list), download_time, download_speed)) + + print("Done downloading files") + + # Verify downloads + if verify_local(client, asset_list): + print("Download verification passed") + + return True + +# Test upload functionality +def test_upload(client: FrameioClient): + print("Beginning upload test") + # Create new parent asset + project_info = client.projects.get(project_id) + root_asset_id = project_info['root_asset_id'] + + print("Creating new folder to upload to") + new_folder = client.assets.create( + parent_asset_id=root_asset_id, + name="{}_{}_Py{}_{}".format(socket.gethostname(), platform.system(), platform.python_version(), datetime.now().strftime("%B-%d-%Y")), + type="folder", + ) + + new_parent_id = new_folder['id'] + + print("Folder created, id: {}, name: {}".format(new_parent_id, new_folder['name'])) + + # Upload all the files we downloaded earlier + dled_files = os.listdir(download_dir) + + for count, fn in enumerate(dled_files, start=1): + start_time = time.time() + ul_abs_path = os.path.join(os.curdir, download_dir, fn) + filesize = os.path.getsize(ul_abs_path) + filename = os.path.basename(ul_abs_path) + + print("{}/{} Beginning to upload: {}".format(count, len(dled_files), fn)) + + client.assets.upload(new_parent_id, ul_abs_path) + + upload_time = time.time() - start_time + upload_speed = Utils.format_value(ceil(filesize/(upload_time)), type=FormatTypes.SPEED) + + print("{}/{} Upload completed in {:.2f}s @ {}".format((count), len(dled_files), upload_time, upload_speed)) + + print("Sleeping for 10 seconds to allow upload and media analysis to finish...") + time.sleep(10) + + print("Continuing...") + + return new_parent_id + +# Flatten asset children and pull out important info for comparison +def flatten_asset_children(asset_children): + flat_dict = dict() + + for asset in asset_children: + 
try: + xxhash_name = "{}_{}".format(asset['name'], 'xxHash') + xxhash_checksum = asset['checksums']['xx_hash'] + + if sys.version_info.major < 3: # if Python 2 convert the field + xxhash_checksum = str(xxhash_checksum.encode('utf-8')) + + flat_dict[xxhash_name] = xxhash_checksum + + except TypeError as e: + print(e) + xxhash_name = "{}_{}".format(asset['name'], 'xxHash') + flat_dict[xxhash_name] = "missing" + + continue + + return flat_dict + +def check_for_checksums(client, upload_folder_id): + # Get asset children for upload folder + asset_children = client.assets.get_children( + upload_folder_id, + page=1, + page_size=40, + include="children" + ) + + global retries + print("Checking for checksums attempt #{}".format(retries+1)) + + if retries < 20: + for asset in asset_children: + try: + asset['checksums']['xx_hash'] + print("Success...") + print("Asset ID: {}".format(asset['id'])) + print("Asset Name: {}".format(asset['name'])) + print("Checksum dict: {}".format(asset['checksums'])) + except TypeError as e: + # print(e) + print("Failure...") + print("Checksum dict: {}".format(asset['checksums'])) + print("Asset ID: {}".format(asset['id'])) + print("Asset Name: {}".format(asset['name'])) + print("Checksums not yet calculated, sleeping for 15 seconds.") + time.sleep(15) + retries += 1 + check_for_checksums(client, upload_folder_id) + return True + else: + return False + +def check_upload_completion(client, download_folder_id, upload_folder_id): + # Do a comparison against filenames and filesizes here to make sure they match + + print("Beginning upload comparison check") + + # Get asset children for download folder + dl_asset_children = client.assets.get_children( + download_folder_id, + page=1, + page_size=40, + include="children" + ) + + print("Got asset children for original download folder") + + print("Making sure checksums are calculated before verifying") + check_for_checksums(client, upload_folder_id) + + # Get asset children for upload folder + 
ul_asset_children = client.assets.get_children( + upload_folder_id, + page=1, + page_size=40, + include="children" + ) + + print("Got asset children for uploaded folder") + + global dl_items # Get the global dl_items + + # if len(dl_items.items) < 1: + + og_items = flatten_asset_children(dl_asset_children) + ul_items = flatten_asset_children(ul_asset_children) + + print("'Completed' uploads: {}/{}".format(int(len(ul_items)), int(len(og_items)))) + print("Percentage uploads completed but not verified: {:.2%}".format(len(ul_items)/len(og_items))) + + print("Running verification...") + + print("OG Items Check:") + pprint(og_items) + + print("DL Items Check:") + pprint(dl_items) + + print("UL Items Check:") + pprint(ul_items) + + pass_fail = Utils.compare_items(og_items, ul_items) + + print("Verification complete for {}/{} uploaded assets.".format(int(len(ul_items)), int(len(og_items)))) + + if ci_job_name is not None: + print("CircleCI Job Name: {}".format(ci_job_name)) + if ci_job_name == "upload_test_job": + send_to_slack(format_slack_message(pass_fail, og_items, dl_items, ul_items)) + + if pass_fail == True: + print("Integration test passed! :)") + else: + print("Integration test failed! 
:(") + sys.exit(1) + + return True + +def format_slack_message(pass_fail, og_items, dl_items, ul_items): + # Format slack message for sending + message = "Test Pass/Fail: *{}*\n\n*Original assets:* \n{}\n*Downloaded assets:* \n {}\n*Uploaded assets:* \n {}".format(pass_fail, pformat(og_items), pformat(dl_items), pformat(ul_items)) + print(message) + + return message + +def send_to_slack(message: str): + # Send Slack message to provided + if len(slack_webhook_url) < 2: + print("No Slack webhook ENV var provided, not sending a Slack message...") + + data = { + 'text': message, + 'username': 'Upload Integration Test', + 'icon_emoji': ':robot_face:' + } + + response = requests.post(slack_webhook_url, data=json.dumps( + data), headers={'Content-Type': 'application/json'}) + + print('Response: ' + str(response.text)) + print('Response code: ' + str(response.status_code)) + + if response.status_code == 200: + return True + else: + return False + +def clean_up(client: FrameioClient, asset_to_delete: str): + print("Removing files from test...") + + try: + client._api_call('delete', '/assets/{}'.format(asset_to_delete)) + print("Managed to cleanup!") + except Exception as e: + print(e) + + return True + +def run_test(): + print("Beginning Integration test...") + + client = init_client() + test_download(client) + upload_folder_id = test_upload(client) + check_upload_completion(client, download_asset_id, upload_folder_id) + # clean_up(client, upload_folder_id) + + print("Test complete, exiting...") + +if __name__ == "__main__": + run_test()