From 6f22fa64d39882e387c1eecbef37902dafbe3ca7 Mon Sep 17 00:00:00 2001 From: "Chayim I. Kirshen" Date: Sun, 16 May 2021 10:53:53 +0300 Subject: [PATCH 01/34] modern python --- .circleci/config.yml | 162 +++++++++++++++++++++++++----------------- pyproject.toml | 46 ++++++++++++ requirements.txt | 3 + test-requirements.txt | 8 --- tox.ini | 13 ++++ 5 files changed, 158 insertions(+), 74 deletions(-) create mode 100644 pyproject.toml create mode 100644 requirements.txt delete mode 100644 test-requirements.txt create mode 100644 tox.ini diff --git a/.circleci/config.yml b/.circleci/config.yml index 9e7918f..ae7a680 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,97 +1,127 @@ +version: 2.1 -# Python CircleCI 2.0 configuration file -# -# Check https://circleci.com/docs/2.0/language-python/ for more details -# -version: 2 -jobs: - build: - docker: - - image: circleci/python:3.6.9 - - image: redislabs/redisai:edge-cpu-bionic - - working_directory: ~/repo +commands: + abort_for_docs: steps: - - checkout - - - restore_cache: # Download and cache dependencies - keys: - - v1-dependencies-{{ checksum "test-requirements.txt" }} - # fallback to using the latest cache if no exact match is found - - v1-dependencies- - - run: - name: install dependencies + name: Avoid tests for docs command: | - virtualenv --no-site-packages venv - . venv/bin/activate - pip install -r test-requirements.txt - - - save_cache: - paths: - - ./venv - key: v1-dependencies-{{ checksum "test-requirements.txt" }} + if [[ $CIRCLE_BRANCH == *docs ]]; then + echo "Identifies as documents PR, no testing required" + circleci step halt + fi + abort_for_noci: + steps: - run: - name: run tests + name: Ignore CI for specific branches command: | - . venv/bin/activate - nosetests --with-coverage -vsx test - codecov + if [[ $CIRCLE_BRANCH == *noci ]]; then + echo "Identifies as actively ignoring CI, no testing required." + circleci step halt + fi - - store_artifacts: - path: test-reports - destination: test-reports - build_nightly: - docker: - - image: circleci/python:3.6.9 - - image: redislabs/redisai:edge-cpu-bionic - - working_directory: ~/repo + early_return_for_forked_pull_requests: + description: >- + If this build is from a fork, stop executing the current job and return success. + This is useful to avoid steps that will fail due to missing credentials. + steps: + - run: + name: Early return if this build is from a forked PR + command: | + if [[ -n "$CIRCLE_PR_NUMBER" ]]; then + echo "Nothing to do for forked PRs, so marking this step successful" + circleci step halt + fi + build_and_test: steps: - - checkout + - checkout - restore_cache: # Download and cache dependencies keys: - - v1-dependencies-{{ checksum "test-requirements.txt" }} + - v1-dependencies-{{ checksum "pyproject.toml" }} # fallback to using the latest cache if no exact match is found - v1-dependencies- - run: - name: install dependencies + name: install tox dependencies command: | - virtualenv --no-site-packages venv - . venv/bin/activate - pip install -r test-requirements.txt + pip install --user --quiet -r requirements.txt + + - run: + name: build + command: | + poetry build --format sdist + poetry build --format wheel + + - run: + name: run tests + command: + tox - save_cache: paths: - ./venv - key: v1-dependencies-{{ checksum "test-requirements.txt" }} + key: v1-dependencies-{{ checksum "pyproject.toml" }} - - run: - name: run tests - command: | - . 
venv/bin/activate - nosetests -vsx test +jobs: + build: + parameters: + python_version: + type: string + docker: + - image: circleci/python:<> + - image: redislabs/redisai:edge-cpu-bionic - # no need for store_artifacts on nightly builds + steps: + - build_and_test + - store_artifacts: + path: test-reports + destination: test-reports + + nightly: + docker: + - image: circleci/python:<> + - image: redislabs/redisai:edge-cpu-bionic + steps: + - build_and_test + +on-any-branch: &on-any-branch + filters: + branches: + only: + - /.*/ + tags: + ignore: /.*/ + +python-versions: &python-versions + matrix: + parameters: + python_version: + - "3.6.9" + - "3.7.9" + - "3.8.9" + - "3.9.4" workflows: version: 2 commit: jobs: - - build - nightly: - triggers: - - schedule: - cron: "0 0 * * *" - filters: - branches: - only: - - master - jobs: - - build_nightly + - build: + <<: *on-any-branch + <<: *python-versions + +# nightly: +# triggers: +# - schedule: +# cron: "0 0 * * *" +# filters: +# branches: +# only: +# - master +# jobs: +# - nightly: +# <<: *python-versions diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..58bd8c3 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,46 @@ +[tool.poetry] +name = "redisai" +version = "1.0.2" +description = "RedisAI Python Client" +authors = ["RedisLabs "] +license = "BSD-3-Clause" +readme = "README.rst" + +packages = [ + { include = 'redisai' }, +] + +classifiers = [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "License :: OSI Approved :: BSD License", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3 :: Only", + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', + 'Topic :: Database' +] + +[tool.poetry.dependencies] +python = "^3.6" +redis = ">=2.10" +hiredis = ">=0.20" +numpy = ">=1.19.5" + +[tool.poetry.dev-dependencies] +codecov = "^2.1.11" +flake8 = "^3.9.2" +rmtest = "^0.7.0" +nose = "^1.3.7" +ml2rt = "^0.2.0" + +[tool.poetry.urls] +url = "https://redisai.io" +repository = "https://github.com/RedisAI/redisai-py" + + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..1c010d2 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,3 @@ +poetry>=1.1.6 +tox>=3.23.1 +tox-poetry>=0.3.0 diff --git a/test-requirements.txt b/test-requirements.txt deleted file mode 100644 index 38dac3d..0000000 --- a/test-requirements.txt +++ /dev/null @@ -1,8 +0,0 @@ -hiredis>=0.2.0 -redis>=2.10 -rmtest>=0.2 -six>=1.10.0 -nose -codecov -numpy -ml2rt diff --git a/tox.ini b/tox.ini new file mode 100644 index 0000000..10a2707 --- /dev/null +++ b/tox.ini @@ -0,0 +1,13 @@ +[tox] + +[flake8] +max-complexity = 10 +ignore = E501 +srcdir = redisai +exclude = + .git, + __pycache__, + +[testenv] +commands = + nosetests -vsx test From 2e4bca18c7ff199b3bfe507b76954dc432fc2c8f Mon Sep 17 00:00:00 2001 From: "Chayim I. 
Kirshen" Date: Sun, 16 May 2021 11:29:18 +0300 Subject: [PATCH 02/34] adding nighties --- .circleci/config.yml | 25 ++++++++++++++----------- setup.py | 32 -------------------------------- 2 files changed, 14 insertions(+), 43 deletions(-) delete mode 100644 setup.py diff --git a/.circleci/config.yml b/.circleci/config.yml index ae7a680..72e4c10 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -83,6 +83,9 @@ jobs: destination: test-reports nightly: + parameters: + python_version: + type: string docker: - image: circleci/python:<> - image: redislabs/redisai:edge-cpu-bionic @@ -114,14 +117,14 @@ workflows: <<: *on-any-branch <<: *python-versions -# nightly: -# triggers: -# - schedule: -# cron: "0 0 * * *" -# filters: -# branches: -# only: -# - master -# jobs: -# - nightly: -# <<: *python-versions + nightly: + triggers: + - schedule: + cron: "0 0 * * *" + filters: + branches: + only: + - master + jobs: + - nightly: + <<: *python-versions diff --git a/setup.py b/setup.py deleted file mode 100644 index 6b3dfd3..0000000 --- a/setup.py +++ /dev/null @@ -1,32 +0,0 @@ -#!/usr/bin/env python -from setuptools import setup, find_packages - - -with open("README.rst") as f: - long_description = f.read() - -setup( - name="redisai", - version="1.0.2", - description="RedisAI Python Client", - long_description=long_description, - long_description_content_type="text/x-rst", - url="http://github.com/RedisAI/redisai-py", - author="RedisLabs", - author_email="oss@redislabs.com", - packages=find_packages(), - install_requires=["redis", "hiredis", "numpy"], - python_requires=">=3.6", - classifiers=[ - "Development Status :: 4 - Beta", - "Intended Audience :: Developers", - "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3 :: Only", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Topic :: Database", - ], -) From 6cf5bc5cc53360aa25a7837722303e082d316556 Mon Sep 17 00:00:00 2001 From: "Chayim I. Kirshen" Date: Sun, 16 May 2021 13:48:35 +0300 Subject: [PATCH 03/34] adding docker file skeleton that builds circleci docker build and publish build instructions --- .circleci/config.yml | 40 ++++++++++++++++++++++++++++------------ Dockerfile | 13 +++++++++++++ README.rst | 15 +++++++++++++-- pyproject.toml | 4 ++++ requirements.txt | 1 - tox.ini | 11 ++++++++++- 6 files changed, 68 insertions(+), 16 deletions(-) create mode 100644 Dockerfile diff --git a/.circleci/config.yml b/.circleci/config.yml index 72e4c10..9f191e1 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -38,7 +38,6 @@ commands: build_and_test: steps: - - checkout - restore_cache: # Download and cache dependencies keys: @@ -57,6 +56,11 @@ commands: poetry build --format sdist poetry build --format wheel + - run: + name: lint + command: | + tox -e linters + - run: name: run tests command: @@ -67,6 +71,17 @@ commands: - ./venv key: v1-dependencies-{{ checksum "pyproject.toml" }} + # build and push docker + dockerize: + steps: + - checkout + - setup_remote_docker + - run: + name: build dockers + command: | + docker login -u redisfab -p $DOCKER_REDISFAB_PWD + docker build -t redislabs/redisai-py:edge . 
+ docker push jobs: build: parameters: @@ -91,6 +106,7 @@ jobs: - image: redislabs/redisai:edge-cpu-bionic steps: - build_and_test + - dockerize on-any-branch: &on-any-branch filters: @@ -117,14 +133,14 @@ workflows: <<: *on-any-branch <<: *python-versions - nightly: - triggers: - - schedule: - cron: "0 0 * * *" - filters: - branches: - only: - - master - jobs: - - nightly: - <<: *python-versions +# nightly: +# triggers: +# - schedule: +# cron: "0 0 * * *" +# filters: +# branches: +# only: +# - master +# jobs: +# - nightly: +# <<: *python-versions diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..f725834 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,13 @@ +ARG OSNICK=bionic +ARG TARGET=cpu + +FROM redislabs/redisai:edge-${TARGET}-${OSNICK} + +RUN apt update && apt install -y python3 python3-pip +ADD . /build +WORKDIR /build +RUN pip3 install -r requirements.txt +RUN poetry config virtualenvs.create false +RUN poetry install +RUN poetry build +RUN pip3 install dist/redisai*.tar.gz diff --git a/README.rst b/README.rst index f6b0bdc..02cd5c3 100644 --- a/README.rst +++ b/README.rst @@ -18,13 +18,13 @@ redisai-py :target: https://codecov.io/gh/RedisAI/redisai-py .. image:: https://readthedocs.org/projects/redisai-py/badge/?version=latest - :target: https://redisai-py.readthedocs.io/en/latest/?badge=latest + :target: https://redisai-py.readthedocs.io/en/latest/?badge=latest .. image:: https://img.shields.io/badge/Forum-RedisAI-blue :target: https://forum.redislabs.com/c/modules/redisai .. image:: https://img.shields.io/discord/697882427875393627?style=flat-square - :target: https://discord.gg/rTQm7UZ + :target: https://discord.gg/rTQm7UZ redisai-py is the Python client for RedisAI. Checkout the `documentation `_ for API details and examples @@ -47,6 +47,17 @@ Installation $ pip install ml2rt +Development +----------- + +1. Create a virtualenv to manage your python dependencies, and ensure it's active. + ```virtualenv -v venv``` +2. Install [pypoetry](https://python-poetry.org/) to manage your dependencies. + ```pip install -r requirements.txt``` +3. Install dependencies. + ```poetry install``` + +[tox](https://tox.readthedocs.io/en/latest/) runs all tests as its default target. Running *tox* by itself will run unit tests. Ensure you have a running redis, with the redisai client loaded. `RedisAI example repo `_ shows few examples made using redisai-py under `python_client` folder. Also, checkout diff --git a/pyproject.toml b/pyproject.toml index 58bd8c3..403ace8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,6 +35,10 @@ flake8 = "^3.9.2" rmtest = "^0.7.0" nose = "^1.3.7" ml2rt = "^0.2.0" +tox = ">=3.23.1" +tox-poetry = "^0.3.0" +bandit = "^1.7.0" +pylint = "^2.8.2" [tool.poetry.urls] url = "https://redisai.io" diff --git a/requirements.txt b/requirements.txt index 1c010d2..74752c0 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,2 @@ poetry>=1.1.6 tox>=3.23.1 -tox-poetry>=0.3.0 diff --git a/tox.ini b/tox.ini index 10a2707..c43a081 100644 --- a/tox.ini +++ b/tox.ini @@ -1,13 +1,22 @@ [tox] +requires = tox-poetry +skipsdist = true [flake8] max-complexity = 10 ignore = E501 -srcdir = redisai +srcdir = ./redisai exclude = .git, __pycache__, + dist, + .tox [testenv] commands = nosetests -vsx test + +[testenv:linters] +commands = + -flake8 ./redisai + -bandit redisai From ef3d224ca0b4c49b2392de91387d37e811fa204c Mon Sep 17 00:00:00 2001 From: "Chayim I. 
Kirshen" Date: Sun, 16 May 2021 13:49:53 +0300 Subject: [PATCH 04/34] linter cleanup --- redisai/__init__.py | 2 +- redisai/client.py | 4 ++-- redisai/command_builder.py | 2 +- redisai/pipeline.py | 1 - 4 files changed, 4 insertions(+), 5 deletions(-) diff --git a/redisai/__init__.py b/redisai/__init__.py index 9c90c68..a03dbac 100644 --- a/redisai/__init__.py +++ b/redisai/__init__.py @@ -1,3 +1,3 @@ -from .client import Client +from .client import Client # noqa __version__ = "1.0.2" diff --git a/redisai/client.py b/redisai/client.py index 21b09fb..09ef532 100644 --- a/redisai/client.py +++ b/redisai/client.py @@ -1,5 +1,5 @@ -from functools import wraps, partial -from typing import Union, AnyStr, ByteString, List, Sequence, Any +from functools import wraps +from typing import Union, AnyStr, ByteString, List, Sequence import warnings from redis import StrictRedis diff --git a/redisai/command_builder.py b/redisai/command_builder.py index 3087342..6804ee3 100644 --- a/redisai/command_builder.py +++ b/redisai/command_builder.py @@ -39,7 +39,7 @@ def modelset( args += ["INPUTS", *utils.listify(inputs)] args += ["OUTPUTS", *utils.listify(outputs)] chunk_size = 500 * 1024 * 1024 - data_chunks = [data[i : i + chunk_size] for i in range(0, len(data), chunk_size)] + data_chunks = [data[i: i + chunk_size] for i in range(0, len(data), chunk_size)] # TODO: need a test case for this args += ["BLOB", *data_chunks] return args diff --git a/redisai/pipeline.py b/redisai/pipeline.py index 447f528..1415b25 100644 --- a/redisai/pipeline.py +++ b/redisai/pipeline.py @@ -1,4 +1,3 @@ -import warnings from functools import partial from typing import AnyStr, Union, Sequence From 34c46b56fca74c389b19774df86944f132fbd942 Mon Sep 17 00:00:00 2001 From: "Chayim I. Kirshen" Date: Tue, 18 May 2021 09:33:17 +0300 Subject: [PATCH 05/34] docs update --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index 02cd5c3..fb66035 100644 --- a/README.rst +++ b/README.rst @@ -57,7 +57,7 @@ Development 3. Install dependencies. ```poetry install``` -[tox](https://tox.readthedocs.io/en/latest/) runs all tests as its default target. Running *tox* by itself will run unit tests. Ensure you have a running redis, with the redisai client loaded. +[tox](https://tox.readthedocs.io/en/latest/) runs all tests as its default target. Running *tox* by itself will run unit tests. Ensure you have a running redis, with the module loaded. `RedisAI example repo `_ shows few examples made using redisai-py under `python_client` folder. Also, checkout From e5893e239b38d871b1227c9d49fac9ff6505c4da Mon Sep 17 00:00:00 2001 From: "Chayim I. 
Kirshen" Date: Tue, 18 May 2021 16:07:18 +0300 Subject: [PATCH 06/34] sample docker, linters, tox updates --- .circleci/config.yml | 28 +++------------------------- Dockerfile | 15 +++++++++++---- README.rst | 2 +- pyproject.toml | 1 + tox.ini | 16 ++++++++-------- 5 files changed, 24 insertions(+), 38 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 9f191e1..818d59e 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -46,9 +46,9 @@ commands: - v1-dependencies- - run: - name: install tox dependencies + name: install poetry command: | - pip install --user --quiet -r requirements.txt + pip install --user --quiet poetry - run: name: build @@ -69,19 +69,9 @@ commands: - save_cache: paths: - ./venv + - ~/.cache/pip key: v1-dependencies-{{ checksum "pyproject.toml" }} - # build and push docker - dockerize: - steps: - - checkout - - setup_remote_docker - - run: - name: build dockers - command: | - docker login -u redisfab -p $DOCKER_REDISFAB_PWD - docker build -t redislabs/redisai-py:edge . - docker push jobs: build: parameters: @@ -132,15 +122,3 @@ workflows: - build: <<: *on-any-branch <<: *python-versions - -# nightly: -# triggers: -# - schedule: -# cron: "0 0 * * *" -# filters: -# branches: -# only: -# - master -# jobs: -# - nightly: -# <<: *python-versions diff --git a/Dockerfile b/Dockerfile index f725834..e1f0b30 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,13 +1,20 @@ ARG OSNICK=bionic ARG TARGET=cpu -FROM redislabs/redisai:edge-${TARGET}-${OSNICK} +FROM redislabs/redisai:edge-${TARGET}-${OSNICK} as builder RUN apt update && apt install -y python3 python3-pip ADD . /build WORKDIR /build -RUN pip3 install -r requirements.txt +RUN pip3 install poetry RUN poetry config virtualenvs.create false -RUN poetry install RUN poetry build -RUN pip3 install dist/redisai*.tar.gz + +### clean docker stage +FROM redislabs/redisai:edge-${TARGET}-${OSNICK} as runner + +RUN apt update && apt install -y python3 python3-pip +RUN rm -rf /var/cache/apt/ + +COPY --from=builder /build/dist/redisai*.tar.gz /tmp/ +RUN pip3 install /tmp/redisai*.tar.gz diff --git a/README.rst b/README.rst index fb66035..4c115bb 100644 --- a/README.rst +++ b/README.rst @@ -53,7 +53,7 @@ Development 1. Create a virtualenv to manage your python dependencies, and ensure it's active. ```virtualenv -v venv``` 2. Install [pypoetry](https://python-poetry.org/) to manage your dependencies. - ```pip install -r requirements.txt``` + ```pip install --user poetry``` 3. Install dependencies. ```poetry install``` diff --git a/pyproject.toml b/pyproject.toml index 403ace8..04e9142 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,6 +39,7 @@ tox = ">=3.23.1" tox-poetry = "^0.3.0" bandit = "^1.7.0" pylint = "^2.8.2" +vulture = "^2.3" [tool.poetry.urls] url = "https://redisai.io" diff --git a/tox.ini b/tox.ini index c43a081..f73ae42 100644 --- a/tox.ini +++ b/tox.ini @@ -1,22 +1,22 @@ [tox] -requires = tox-poetry skipsdist = true +envlist = linters [flake8] max-complexity = 10 ignore = E501 srcdir = ./redisai -exclude = - .git, - __pycache__, - dist, - .tox +exclude =.git,.tox,dist,doc,*/__pycache__/* [testenv] +whitelist_externals = find +commands_pre = + find . -type f -name "*.pyc" -delete commands = nosetests -vsx test [testenv:linters] commands = - -flake8 ./redisai - -bandit redisai + flake8 --show-source + vulture redisai --min-confidence 80 + bandit redisai/** From 3ca1c7e9ab6d1bdfd0136fe35eabe16094de4e04 Mon Sep 17 00:00:00 2001 From: "Chayim I. 
Kirshen" Date: Tue, 18 May 2021 16:09:31 +0300 Subject: [PATCH 07/34] snyk added to the README, poetry pypi workflow --- .github/workflows/publish-pypi.yml | 55 +++++++++++++----------------- README.rst | 2 ++ 2 files changed, 26 insertions(+), 31 deletions(-) diff --git a/.github/workflows/publish-pypi.yml b/.github/workflows/publish-pypi.yml index 10a87d6..7caeccd 100644 --- a/.github/workflows/publish-pypi.yml +++ b/.github/workflows/publish-pypi.yml @@ -1,50 +1,43 @@ name: Publish Pypi on: release: - types: [published] + types: [ published ] jobs: - publish: - name: publish + pytest: + name: Publish to PyPi runs-on: ubuntu-latest + env: + ACTIONS_ALLOW_UNSECURE_COMMANDS: true steps: - uses: actions/checkout@master - - name: Set up Python 3.6 + - name: Set up Python 3.7 uses: actions/setup-python@v1 with: - python-version: 3.6 + python-version: 3.7 - - name: Install twine - run: | - pip install twine - - - name: Install wheel - run: | - pip install wheel - - - name: Create a source distribution - run: | - python setup.py sdist + - name: Install Poetry + uses: dschep/install-poetry-action@v1.3 - - name: Create a wheel - run: | - python setup.py bdist_wheel + - name: Cache Poetry virtualenv + uses: actions/cache@v1 + id: cache + with: + path: ~/.virtualenvs + key: poetry-${{ hashFiles('**/poetry.lock') }} + restore-keys: | + poetry-${{ hashFiles('**/poetry.lock') }} - - name: Create a .pypirc + - name: Set Poetry config run: | - echo -e "[pypi]" >> ~/.pypirc - echo -e "username = __token__" >> ~/.pypirc - echo -e "password = ${{ secrets.PYPI_TOKEN }}" >> ~/.pypirc - echo -e "[testpypi]" >> ~/.pypirc - echo -e "username = __token__" >> ~/.pypirc - echo -e "password = ${{ secrets.TESTPYPI_TOKEN }}" >> ~/.pypirc + poetry config virtualenvs.in-project false + poetry config virtualenvs.path ~/.virtualenvs - - name: Publish to Test PyPI - if: github.event_name == 'release' - run: | - twine upload --skip-existing -r testpypi dist/* + - name: Install Dependencies + run: poetry install + if: steps.cache.outputs.cache-hit != 'true' - name: Publish to PyPI if: github.event_name == 'release' run: | - twine upload -r pypi dist/* + poetry publish -u __token__ -p ${{ secrets.PYPI_TOKEN }} --build diff --git a/README.rst b/README.rst index 4c115bb..21c4ea2 100644 --- a/README.rst +++ b/README.rst @@ -26,6 +26,8 @@ redisai-py .. image:: https://img.shields.io/discord/697882427875393627?style=flat-square :target: https://discord.gg/rTQm7UZ +.. image:: https://snyk.io/test/github/RedisAI/redisai-py/badge.svg?targetFile=pyproject.toml)](https://snyk.io/test/github/RedisAI/redisai-py?targetFile=pyproject.toml + redisai-py is the Python client for RedisAI. 
Checkout the `documentation `_ for API details and examples From 1161cb7d50f80a65a7c0b33195a87cb1ce3d6c52 Mon Sep 17 00:00:00 2001 From: AvitalFineRedis Date: Tue, 18 May 2021 17:46:27 +0300 Subject: [PATCH 08/34] add support --- redisai/client.py | 49 ++++++++++++++++++++++++++++++++++++++ redisai/command_builder.py | 29 ++++++++++++++++++++++ 2 files changed, 78 insertions(+) diff --git a/redisai/client.py b/redisai/client.py index 21b09fb..3f4d4f8 100644 --- a/redisai/client.py +++ b/redisai/client.py @@ -532,6 +532,55 @@ def scriptrun( res = self.execute_command(*args) return res if not self.enable_postprocess else processor.scriptrun(res) + def scriptexecute( + self, + key: AnyStr, + function: AnyStr, + keys: Union[AnyStr, Sequence[AnyStr]], + inputs: Union[AnyStr, Sequence[AnyStr]] = None, + list_inputs: Sequence[Sequence[AnyStr]] = None, + outputs: Union[AnyStr, Sequence[AnyStr]] = None, + timeout: int = None, + ) -> str: + """ + Run an already set script. Similar to modelrun + + Parameters + ---------- + key : AnyStr + Script key + function : AnyStr + Name of the function in the ``script`` + keys : Union[AnyStr, Sequence[AnyStr]] + Either a squence of key names that the script will access before, during and + after its execution, or a tag which all those keys share. + inputs : Union[AnyStr, List[AnyStr]] + Tensor(s) which is already saved in the RedisAI using a tensorset call. These + tensors will be used as the input for the modelrun + list_inputs : Sequence[Sequence[AnyStr]] + list of inputs. + outputs : Union[AnyStr, List[AnyStr]] + keys on which the outputs to be saved. If those keys exist already, modelrun + will overwrite them with new values + timeout : int + The max number on milisecinds that may pass before the request is prossced + (meaning that the result will not be computed after that time and TIMEDOUT + is returned in that case) + + Returns + ------- + str + 'OK' if success, raise an exception otherwise + + Example + ------- + >>> con.scriptexecute('ket', 'bar', keys=['a', 'b', 'c'], inputs=['a', 'b'], outputs=['c']) + 'OK' + """ + args = builder.scriptexecute(key, function, keys, inputs, list_inputs, outputs, timeout) + res = self.execute_command(*args) + return res if not self.enable_postprocess else processor.scriptrun(res) + def scriptscan(self) -> List[List[AnyStr]]: """ Returns the list of all the script in the RedisAI server. 
Scriptscan API is diff --git a/redisai/command_builder.py b/redisai/command_builder.py index 3087342..c27195c 100644 --- a/redisai/command_builder.py +++ b/redisai/command_builder.py @@ -153,6 +153,35 @@ def scriptrun( ) return args +def scriptexecute( + name: AnyStr, + function: AnyStr, + keys: Union[AnyStr, Sequence[AnyStr]], + inputs: Union[AnyStr, Sequence[AnyStr]], + list_inputs: Sequence[Sequence[AnyStr]], + outputs: Union[AnyStr, Sequence[AnyStr]], + timeout: int, +) -> Sequence: + args = [ + "AI.SCRIPTEXECUTE", + name, + function, + "KEYS", + len(utils.listify(keys)), + *utils.listify(keys), + ] + + if inputs is not None: + args += ["INPUTS", len(utils.listify(inputs)), *utils.listify(inputs)] + if list_inputs is not None: + for li in list_inputs: + args += ["LIST_INPUTS", len(li), *li] + if outputs is not None: + args += ["OUTPUTS", len(utils.listify(outputs)), *utils.listify(outputs)] + if timeout is not None: + args += ["TIMEOUT", timeout] + + return args def scriptscan() -> Sequence: return ("AI._SCRIPTSCAN",) From aa8cff9d3db586a7771b188c086b99ba085d3c99 Mon Sep 17 00:00:00 2001 From: AvitalFineRedis Date: Tue, 18 May 2021 17:46:35 +0300 Subject: [PATCH 09/34] add tests --- test/test.py | 177 ++++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 176 insertions(+), 1 deletion(-) diff --git a/test/test.py b/test/test.py index 1dcf8fa..ea468c2 100644 --- a/test/test.py +++ b/test/test.py @@ -29,6 +29,12 @@ def __exit__(self, *args): script = r""" def bar(a, b): return a + b + +def bar_variadic(a, args : List[Tensor]): + return args[0] + args[1] + +def bar_two_lists(a: List[Tensor], b:List[Tensor]): + return a[0] + b[0] """ @@ -250,7 +256,7 @@ def test_run_tf_model(self): con.modeldel("m") self.assertRaises(ResponseError, con.modelget, "m") - def test_scripts(self): + def test_scripts_run(self): con = self.get_client() self.assertRaises(ResponseError, con.scriptset, "ket", "cpu", "return 1") con.scriptset("ket", "cpu", script) @@ -272,6 +278,175 @@ def test_scripts(self): con.scriptdel("ket") self.assertRaises(ResponseError, con.scriptget, "ket") + def test_scripts_execute_basic(self): + con = self.get_client() + self.assertRaises(ResponseError, con.scriptset, "ket", "cpu", "return 1") + con.scriptset("ket", "cpu", script) + con.tensorset("a", (2, 3), dtype="float") + con.tensorset("b", (2, 3), dtype="float") + # try with bad arguments: + self.assertRaises( + ResponseError, con.scriptexecute, "ket", "bar", keys=["a", "c"], inputs=["a"], outputs=["c"] + ) + con.scriptexecute("ket", "bar", keys=["a", "b", "c"], inputs=["a", "b"], outputs=["c"]) + tensor = con.tensorget("c", as_numpy=False) + self.assertEqual([4, 6], tensor["values"]) + script_det = con.scriptget("ket") + self.assertTrue(script_det["device"] == "cpu") + self.assertTrue(script_det["source"] == script) + script_det = con.scriptget("ket", meta_only=True) + self.assertTrue(script_det["device"] == "cpu") + self.assertNotIn("source", script_det) + con.scriptdel("ket") + self.assertRaises(ResponseError, con.scriptget, "ket") + + def test_scripts_execute_advanced(self): + con = self.get_client() + con.scriptset("myscript{1}", "cpu", script, "version1") + con.tensorset("a{1}", [2, 3, 2, 3], shape=(2, 2), dtype="float") + con.tensorset("b{1}", [2, 3, 2, 3], shape=(2, 2), dtype="float") + + for _ in range(0, 100): + con.scriptexecute("myscript{1}", "bar", keys=["{1}"], inputs=["a{1}", "b{1}"], outputs=["c{1}"]) + + info = con.infoget('myscript{1}') + self.assertEqual(info['key'], 'myscript{1}') + 
self.assertEqual(info['type'], 'SCRIPT') + self.assertEqual(info['backend'], 'TORCH') + self.assertEqual(info['tag'], 'version1') + self.assertTrue(info['duration'] > 0) + self.assertEqual(info['samples'], -1) + self.assertEqual(info['calls'], 100) + self.assertEqual(info['errors'], 0) + + values = con.tensorget("c{1}", as_numpy=False)['values'] + self.assertEqual(values, [4.0, 6.0, 4.0, 6.0]) + + def test_scripts_execute_list_input(self): + con = self.get_client() + con.scriptset("myscript{$}", "cpu", script, "version1") + con.tensorset("a{$}", [2, 3, 2, 3], shape=(2, 2), dtype="float") + con.tensorset("b1{$}", [2, 3, 2, 3], shape=(2, 2), dtype="float") + con.tensorset("b2{$}", [2, 3, 2, 3], shape=(2, 2), dtype="float") + + for _ in range(0, 100): + con.scriptexecute("myscript{$}", 'bar_variadic', + keys=["{$}"], + inputs=["a{$}"], + list_inputs=[["b1{$}", "b2{$}"]], + outputs=["c{$}"]) + + info = con.infoget('myscript{$}') + + self.assertEqual(info['key'], 'myscript{$}') + self.assertEqual(info['type'], 'SCRIPT') + self.assertEqual(info['backend'], 'TORCH') + self.assertEqual(info['tag'], 'version1') + self.assertTrue(info['duration'] > 0) + self.assertEqual(info['samples'], -1) + self.assertEqual(info['calls'], 100) + self.assertEqual(info['errors'], 0) + + values = con.tensorget("c{$}", as_numpy=False)['values'] + self.assertEqual(values, [4.0, 6.0, 4.0, 6.0]) + + def test_scripts_execute_multiple_list_input(self): + con = self.get_client() + con.scriptset("myscript{$}", "cpu", script, "version1") + con.tensorset("a{$}", [2, 3, 2, 3], shape=(2, 2), dtype="float") + con.tensorset("b{$}", [2, 3, 2, 3], shape=(2, 2), dtype="float") + + for _ in range(0, 100): + con.scriptexecute('myscript{$}', 'bar_two_lists', + keys=["{$}"], + list_inputs=[["a{$}"], ["b{$}"]], + outputs=["c{$}"]) + + info = con.infoget('myscript{$}') + + self.assertEqual(info['key'], 'myscript{$}') + self.assertEqual(info['type'], 'SCRIPT') + self.assertEqual(info['backend'], 'TORCH') + self.assertEqual(info['tag'], 'version1') + self.assertTrue(info['duration'] > 0) + self.assertEqual(info['samples'], -1) + self.assertEqual(info['calls'], 100) + self.assertEqual(info['errors'], 0) + + values = con.tensorget('c{$}', as_numpy=False)['values'] + self.assertEqual(values, [4.0, 6.0, 4.0, 6.0]) + + def test_scripts_execute_errors(self): + con = self.get_client() + con.scriptset("ket{1}", "cpu", script, tag="version1") + con.tensorset("a{1}", [2, 3, 2, 3], shape=(2, 2), dtype="float") + con.tensorset("b{1}", [2, 3, 2, 3], shape=(2, 2), dtype="float") + + con.delete("EMPTY{1}") + # ERR no script at key from SCRIPTGET + self.assertRaises(ResponseError, con.scriptget, "EMPTY{1}") + + con.set('NOT_SCRIPT{1}', 'BAR') + # ERR wrong type from SCRIPTGET + self.assertRaises(ResponseError, con.scriptget, 'NOT_SCRIPT{1}') + + con.delete('EMPTY{1}') + # ERR no script at key from SCRIPTEXECUTE + self.assertRaises(ResponseError, con.scriptexecute, 'EMPTY{1}', 'bar', + keys=['{1}'], inputs=['b{1}'], outputs=['c{1}']) + + con.set('NOT_SCRIPT{1}', 'BAR') + # ERR wrong type from SCRIPTEXECUTE + self.assertRaises(ResponseError, con.scriptexecute, 'NOT_SCRIPT{1}', 'bar', + keys=['{1}'], inputs=['b{1}'], outputs=['c{1}']) + + con.delete('EMPTY{1}') + # ERR Input key is empty + self.assertRaises(ResponseError, con.scriptexecute, 'ket{1}', 'bar', + keys=['{1}'], inputs=['EMPTY{1}', 'b{1}'], outputs=['c{1}']) + + con.set('NOT_TENSOR{1}', 'BAR') + # ERR Input key not tensor + self.assertRaises(ResponseError, con.scriptexecute, 'ket{1}', 'bar', + 
keys=['{1}'], inputs=['NOT_TENSOR{1}', 'b{1}'], outputs=['c{1}']) + + self.assertRaises(ResponseError, con.scriptexecute, 'ket{1}', 'bar', + keys=['{1}'], inputs=['b{1}'], outputs=['c{1}']) + + self.assertRaises(ResponseError, con.scriptexecute, 'ket{1}', 'bar', keys=['{1}'], inputs=['b{1}'], outputs=[]) + + self.assertRaises(ResponseError, con.scriptexecute, 'ket{1}', 'bar', keys=['{1}'], inputs=[], outputs=[]) + + self.assertRaises(ResponseError, con.scriptexecute, 'ket{1}', 'bar', keys=[], inputs=[], outputs=[]) + + def test_scripts_execute_variadic_errors(self): + con = self.get_client() + con.scriptset("ket{$}", "cpu", script, tag="version1") + con.tensorset("a{$}", [2, 3, 2, 3], shape=(2, 2), dtype="float") + con.tensorset("b{$}", [2, 3, 2, 3], shape=(2, 2), dtype="float") + + con.delete('EMPTY{$}') + # ERR Variadic input key is empty + self.assertRaises(ResponseError, con.scriptexecute, 'ket{$}', 'bar_variadic', + keys=['{$}'], inputs=['a{$}'], list_inputs=[['EMPTY{$}', 'b{$}']], outputs=['c{$}']) + + con.set('NOT_TENSOR{$}', 'BAR') + # ERR Variadic input key not tensor + self.assertRaises(ResponseError, con.scriptexecute, 'ket{$}', 'bar_variadic', + keys=['{$}'], inputs=['a{$}'], list_inputs=[['NOT_TENSOR{$}', 'b{$}']], outputs=['c{$}']) + + self.assertRaises(ResponseError, con.scriptexecute, 'ket{$}', 'bar_variadic', + keys=['{$}'], inputs=['b{$}', '${$}'], outputs=['c{$}']) + + self.assertRaises(ResponseError, con.scriptexecute, 'ket{$}', 'bar_variadic', + keys=['{$}'], inputs=['b{$}'], list_inputs=[[]], outputs=[]) + + self.assertRaises(ResponseError, con.scriptexecute, 'ket{$}', 'bar_variadic', + keys=['{$}'], inputs=[], list_inputs=[[]], outputs=[]) + + self.assertRaises(ResponseError, con.scriptexecute, 'ket{$}', 'bar_variadic', + keys=['{$}'], list_inputs=[['a{$}'], ['b{$}']], outputs=[]) + def test_run_onnxml_model(self): mlmodel_path = os.path.join(MODEL_DIR, "boston.onnx") onnxml_model = load_model(mlmodel_path) From 044fa97979c373350c7b6e0d982422927d2e11ae Mon Sep 17 00:00:00 2001 From: "Chayim I. Kirshen" Date: Tue, 18 May 2021 17:57:37 +0300 Subject: [PATCH 10/34] tox, with poetry --- .circleci/config.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 818d59e..3b41430 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -46,9 +46,9 @@ commands: - v1-dependencies- - run: - name: install poetry + name: install poetry and tox command: | - pip install --user --quiet poetry + pip install --user --quiet poetry tox - run: name: build From 24e0fef33fa43d70de40d5e935b5de716d2b4f52 Mon Sep 17 00:00:00 2001 From: "Chayim I. 
Kirshen" Date: Wed, 19 May 2021 08:47:31 +0300 Subject: [PATCH 11/34] ai, from bloom --- requirements.txt => .circleci/circle_requirements.txt | 1 + .circleci/config.yml | 8 ++++---- tox.ini | 4 ++-- 3 files changed, 7 insertions(+), 6 deletions(-) rename requirements.txt => .circleci/circle_requirements.txt (59%) diff --git a/requirements.txt b/.circleci/circle_requirements.txt similarity index 59% rename from requirements.txt rename to .circleci/circle_requirements.txt index 74752c0..1c010d2 100644 --- a/requirements.txt +++ b/.circleci/circle_requirements.txt @@ -1,2 +1,3 @@ poetry>=1.1.6 tox>=3.23.1 +tox-poetry>=0.3.0 diff --git a/.circleci/config.yml b/.circleci/config.yml index 3b41430..071f3ec 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -46,9 +46,9 @@ commands: - v1-dependencies- - run: - name: install poetry and tox + name: install tox dependencies command: | - pip install --user --quiet poetry tox + pip install --user --quiet -r .circleci/circle_requirements.txt - run: name: build @@ -64,11 +64,11 @@ commands: - run: name: run tests command: - tox + tox -e nosetests - save_cache: paths: - - ./venv + - ./.tox - ~/.cache/pip key: v1-dependencies-{{ checksum "pyproject.toml" }} diff --git a/tox.ini b/tox.ini index f73ae42..e386ec9 100644 --- a/tox.ini +++ b/tox.ini @@ -1,6 +1,6 @@ [tox] skipsdist = true -envlist = linters +#envlist = linters [flake8] max-complexity = 10 @@ -8,7 +8,7 @@ ignore = E501 srcdir = ./redisai exclude =.git,.tox,dist,doc,*/__pycache__/* -[testenv] +[testenv:nosetests] whitelist_externals = find commands_pre = find . -type f -name "*.pyc" -delete From d71fe6282fa0a35604141b4304510b387dae38d5 Mon Sep 17 00:00:00 2001 From: "Chayim I. Kirshen" Date: Wed, 19 May 2021 08:55:04 +0300 Subject: [PATCH 12/34] latest --- .circleci/config.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index 071f3ec..2b9dfb7 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -114,6 +114,7 @@ python-versions: &python-versions - "3.7.9" - "3.8.9" - "3.9.4" + - "latest" workflows: version: 2 From b9b31e34957453a7d9a5550d5527cb01bf290866 Mon Sep 17 00:00:00 2001 From: "Chayim I. Kirshen" Date: Wed, 19 May 2021 09:01:36 +0300 Subject: [PATCH 13/34] linter comments --- redisai/client.py | 4 ++-- redisai/dag.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/redisai/client.py b/redisai/client.py index 09ef532..5c7ce08 100644 --- a/redisai/client.py +++ b/redisai/client.py @@ -65,8 +65,8 @@ def pipeline(self, transaction: bool = True, shard_hint: bool = None) -> "Pipeli self.enable_postprocess, self.connection_pool, self.response_callbacks, - transaction=True, - shard_hint=None, + transaction=transaction, + shard_hint=shard_hint, ) def dag( diff --git a/redisai/dag.py b/redisai/dag.py index 1b60529..bf6e971 100644 --- a/redisai/dag.py +++ b/redisai/dag.py @@ -11,7 +11,7 @@ class Dag: - def __init__(self, load, persist, executor, readonly=False, postprocess=True): + def __init__(self, load, persist, executor, readonly=False): self.result_processors = [] self.enable_postprocess = True if readonly: From fbc5cda64c707180ee0df5af9cd1d13edbaadf18 Mon Sep 17 00:00:00 2001 From: "Chayim I. 
Kirshen" Date: Wed, 19 May 2021 09:19:45 +0300 Subject: [PATCH 14/34] lifting dockers --- .circleci/config.yml | 44 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 44 insertions(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index 2b9dfb7..254c361 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -72,6 +72,22 @@ commands: - ~/.cache/pip key: v1-dependencies-{{ checksum "pyproject.toml" }} + docker: + parameters: + docker_version: + type: string + default: "edge" + steps: + - setup_remote_docker + - run: + name: dockers + description: Build and release docker + command: | + bash <(curl -fsSL https://get.docker.com) + docker login -u redisfab -p $DOCKER_REDISFAB_PWD + docker build -t redisgraph:<> . + docker push + jobs: build: parameters: @@ -87,6 +103,20 @@ jobs: path: test-reports destination: test-reports + # since this is used by cron, we by default build against latest + build_and_publish: + parameters: + docker_version: + type: string + default: "edge" + docker: + - image: circleci/python:latest + - image: redislabs/rediai:edge-cpu-bionic + + steps: + - build_and_test + - docker + nightly: parameters: python_version: @@ -106,6 +136,12 @@ on-any-branch: &on-any-branch tags: ignore: /.*/ +on-master: &on-master + filters: + branches: + only: + - master + python-versions: &python-versions matrix: parameters: @@ -123,3 +159,11 @@ workflows: - build: <<: *on-any-branch <<: *python-versions + + nightly: + triggers: + - schedule: + cron: "0 0 * * *" + <<: *on-master + jobs: + - build_and_publish From db860f6c4ecb5a2f9e86cf49a252ee508bcde0ba Mon Sep 17 00:00:00 2001 From: "Chayim I. Kirshen" Date: Wed, 19 May 2021 11:28:55 +0300 Subject: [PATCH 15/34] unifying the pypi settings removing second poetry build call, since they can be done in one --- .circleci/config.yml | 5 ++--- pyproject.toml | 11 +++++------ 2 files changed, 7 insertions(+), 9 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 254c361..1deac19 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -51,10 +51,9 @@ commands: pip install --user --quiet -r .circleci/circle_requirements.txt - run: - name: build + name: build sdist and wheels command: | - poetry build --format sdist - poetry build --format wheel + poetry build - run: name: lint diff --git a/pyproject.toml b/pyproject.toml index 04e9142..adc9d4b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -11,16 +11,15 @@ packages = [ ] classifiers = [ - "Development Status :: 4 - Beta", - "Intended Audience :: Developers", - "License :: OSI Approved :: BSD License", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3 :: Only", + 'Topic :: Database', + 'Programming Language :: Python', + 'Intended Audience :: Developers', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', - 'Topic :: Database' + 'License :: OSI Approved :: BSD License', + 'Development Status :: 5 - Production/Stable' ] [tool.poetry.dependencies] From 5bf14262dc4cd153bb8aa24360e2b71e4f95d41e Mon Sep 17 00:00:00 2001 From: "Chayim I. 
Kirshen" Date: Wed, 19 May 2021 16:49:58 +0300 Subject: [PATCH 16/34] linters --- redisai/command_builder.py | 2 +- tox.ini | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/redisai/command_builder.py b/redisai/command_builder.py index ed67687..53e422b 100644 --- a/redisai/command_builder.py +++ b/redisai/command_builder.py @@ -64,7 +64,7 @@ def modelstore( "Inputs and outputs keywords should not be specified for this backend" ) chunk_size = 500 * 1024 * 1024 # TODO: this should be configurable. - data_chunks = [data[i : i + chunk_size] for i in range(0, len(data), chunk_size)] + data_chunks = [data[i: i + chunk_size] for i in range(0, len(data), chunk_size)] # TODO: need a test case for this args += ["BLOB", *data_chunks] return args diff --git a/tox.ini b/tox.ini index e386ec9..a93d9a6 100644 --- a/tox.ini +++ b/tox.ini @@ -4,7 +4,7 @@ skipsdist = true [flake8] max-complexity = 10 -ignore = E501 +ignore = E501,C901 srcdir = ./redisai exclude =.git,.tox,dist,doc,*/__pycache__/* From 0056bc342743c91d53366a99c7cd8026ef9216c1 Mon Sep 17 00:00:00 2001 From: AvitalFineRedis Date: Thu, 20 May 2021 10:08:57 +0300 Subject: [PATCH 17/34] reset --- .circleci/circle_requirements.txt | 3 - .circleci/config.yml | 193 +++++++++-------------------- .github/workflows/publish-pypi.yml | 55 ++++---- Dockerfile | 20 --- README.rst | 17 +-- pyproject.toml | 52 -------- redisai/dag.py | 7 +- redisai/pipeline.py | 6 +- redisai/postprocessor.py | 3 +- redisai/utils.py | 4 +- tox.ini | 22 ---- 11 files changed, 105 insertions(+), 277 deletions(-) delete mode 100644 .circleci/circle_requirements.txt delete mode 100644 Dockerfile delete mode 100644 pyproject.toml delete mode 100644 tox.ini diff --git a/.circleci/circle_requirements.txt b/.circleci/circle_requirements.txt deleted file mode 100644 index 1c010d2..0000000 --- a/.circleci/circle_requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -poetry>=1.1.6 -tox>=3.23.1 -tox-poetry>=0.3.0 diff --git a/.circleci/config.yml b/.circleci/config.yml index 1deac19..9e7918f 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,168 +1,97 @@ -version: 2.1 -commands: - - abort_for_docs: - steps: - - run: - name: Avoid tests for docs - command: | - if [[ $CIRCLE_BRANCH == *docs ]]; then - echo "Identifies as documents PR, no testing required" - circleci step halt - fi - - abort_for_noci: - steps: - - run: - name: Ignore CI for specific branches - command: | - if [[ $CIRCLE_BRANCH == *noci ]]; then - echo "Identifies as actively ignoring CI, no testing required." - circleci step halt - fi +# Python CircleCI 2.0 configuration file +# +# Check https://circleci.com/docs/2.0/language-python/ for more details +# +version: 2 +jobs: + build: + docker: + - image: circleci/python:3.6.9 + - image: redislabs/redisai:edge-cpu-bionic + working_directory: ~/repo - early_return_for_forked_pull_requests: - description: >- - If this build is from a fork, stop executing the current job and return success. - This is useful to avoid steps that will fail due to missing credentials. 
- steps: - - run: - name: Early return if this build is from a forked PR - command: | - if [[ -n "$CIRCLE_PR_NUMBER" ]]; then - echo "Nothing to do for forked PRs, so marking this step successful" - circleci step halt - fi - - build_and_test: steps: - checkout + - restore_cache: # Download and cache dependencies keys: - - v1-dependencies-{{ checksum "pyproject.toml" }} + - v1-dependencies-{{ checksum "test-requirements.txt" }} # fallback to using the latest cache if no exact match is found - v1-dependencies- - run: - name: install tox dependencies - command: | - pip install --user --quiet -r .circleci/circle_requirements.txt - - - run: - name: build sdist and wheels - command: | - poetry build - - - run: - name: lint + name: install dependencies command: | - tox -e linters - - - run: - name: run tests - command: - tox -e nosetests + virtualenv --no-site-packages venv + . venv/bin/activate + pip install -r test-requirements.txt - save_cache: paths: - - ./.tox - - ~/.cache/pip - key: v1-dependencies-{{ checksum "pyproject.toml" }} - - docker: - parameters: - docker_version: - type: string - default: "edge" - steps: - - setup_remote_docker + - ./venv + key: v1-dependencies-{{ checksum "test-requirements.txt" }} + - run: - name: dockers - description: Build and release docker + name: run tests command: | - bash <(curl -fsSL https://get.docker.com) - docker login -u redisfab -p $DOCKER_REDISFAB_PWD - docker build -t redisgraph:<> . - docker push + . venv/bin/activate + nosetests --with-coverage -vsx test + codecov -jobs: - build: - parameters: - python_version: - type: string - docker: - - image: circleci/python:<> - - image: redislabs/redisai:edge-cpu-bionic - - steps: - - build_and_test - store_artifacts: path: test-reports destination: test-reports - # since this is used by cron, we by default build against latest - build_and_publish: - parameters: - docker_version: - type: string - default: "edge" + build_nightly: docker: - - image: circleci/python:latest - - image: redislabs/rediai:edge-cpu-bionic + - image: circleci/python:3.6.9 + - image: redislabs/redisai:edge-cpu-bionic - steps: - - build_and_test - - docker + working_directory: ~/repo - nightly: - parameters: - python_version: - type: string - docker: - - image: circleci/python:<> - - image: redislabs/redisai:edge-cpu-bionic steps: - - build_and_test - - dockerize - -on-any-branch: &on-any-branch - filters: - branches: - only: - - /.*/ - tags: - ignore: /.*/ - -on-master: &on-master - filters: - branches: - only: - - master - -python-versions: &python-versions - matrix: - parameters: - python_version: - - "3.6.9" - - "3.7.9" - - "3.8.9" - - "3.9.4" - - "latest" + - checkout + + - restore_cache: # Download and cache dependencies + keys: + - v1-dependencies-{{ checksum "test-requirements.txt" }} + # fallback to using the latest cache if no exact match is found + - v1-dependencies- + + - run: + name: install dependencies + command: | + virtualenv --no-site-packages venv + . venv/bin/activate + pip install -r test-requirements.txt + + - save_cache: + paths: + - ./venv + key: v1-dependencies-{{ checksum "test-requirements.txt" }} + + - run: + name: run tests + command: | + . 
venv/bin/activate + nosetests -vsx test + + # no need for store_artifacts on nightly builds workflows: version: 2 commit: jobs: - - build: - <<: *on-any-branch - <<: *python-versions - + - build nightly: triggers: - schedule: cron: "0 0 * * *" - <<: *on-master + filters: + branches: + only: + - master jobs: - - build_and_publish + - build_nightly diff --git a/.github/workflows/publish-pypi.yml b/.github/workflows/publish-pypi.yml index 7caeccd..10a87d6 100644 --- a/.github/workflows/publish-pypi.yml +++ b/.github/workflows/publish-pypi.yml @@ -1,43 +1,50 @@ name: Publish Pypi on: release: - types: [ published ] + types: [published] jobs: - pytest: - name: Publish to PyPi + publish: + name: publish runs-on: ubuntu-latest - env: - ACTIONS_ALLOW_UNSECURE_COMMANDS: true steps: - uses: actions/checkout@master - - name: Set up Python 3.7 + - name: Set up Python 3.6 uses: actions/setup-python@v1 with: - python-version: 3.7 + python-version: 3.6 - - name: Install Poetry - uses: dschep/install-poetry-action@v1.3 + - name: Install twine + run: | + pip install twine - - name: Cache Poetry virtualenv - uses: actions/cache@v1 - id: cache - with: - path: ~/.virtualenvs - key: poetry-${{ hashFiles('**/poetry.lock') }} - restore-keys: | - poetry-${{ hashFiles('**/poetry.lock') }} + - name: Install wheel + run: | + pip install wheel + + - name: Create a source distribution + run: | + python setup.py sdist - - name: Set Poetry config + - name: Create a wheel run: | - poetry config virtualenvs.in-project false - poetry config virtualenvs.path ~/.virtualenvs + python setup.py bdist_wheel - - name: Install Dependencies - run: poetry install - if: steps.cache.outputs.cache-hit != 'true' + - name: Create a .pypirc + run: | + echo -e "[pypi]" >> ~/.pypirc + echo -e "username = __token__" >> ~/.pypirc + echo -e "password = ${{ secrets.PYPI_TOKEN }}" >> ~/.pypirc + echo -e "[testpypi]" >> ~/.pypirc + echo -e "username = __token__" >> ~/.pypirc + echo -e "password = ${{ secrets.TESTPYPI_TOKEN }}" >> ~/.pypirc + + - name: Publish to Test PyPI + if: github.event_name == 'release' + run: | + twine upload --skip-existing -r testpypi dist/* - name: Publish to PyPI if: github.event_name == 'release' run: | - poetry publish -u __token__ -p ${{ secrets.PYPI_TOKEN }} --build + twine upload -r pypi dist/* diff --git a/Dockerfile b/Dockerfile deleted file mode 100644 index e1f0b30..0000000 --- a/Dockerfile +++ /dev/null @@ -1,20 +0,0 @@ -ARG OSNICK=bionic -ARG TARGET=cpu - -FROM redislabs/redisai:edge-${TARGET}-${OSNICK} as builder - -RUN apt update && apt install -y python3 python3-pip -ADD . /build -WORKDIR /build -RUN pip3 install poetry -RUN poetry config virtualenvs.create false -RUN poetry build - -### clean docker stage -FROM redislabs/redisai:edge-${TARGET}-${OSNICK} as runner - -RUN apt update && apt install -y python3 python3-pip -RUN rm -rf /var/cache/apt/ - -COPY --from=builder /build/dist/redisai*.tar.gz /tmp/ -RUN pip3 install /tmp/redisai*.tar.gz diff --git a/README.rst b/README.rst index 21c4ea2..f6b0bdc 100644 --- a/README.rst +++ b/README.rst @@ -18,15 +18,13 @@ redisai-py :target: https://codecov.io/gh/RedisAI/redisai-py .. image:: https://readthedocs.org/projects/redisai-py/badge/?version=latest - :target: https://redisai-py.readthedocs.io/en/latest/?badge=latest + :target: https://redisai-py.readthedocs.io/en/latest/?badge=latest .. image:: https://img.shields.io/badge/Forum-RedisAI-blue :target: https://forum.redislabs.com/c/modules/redisai .. 
image:: https://img.shields.io/discord/697882427875393627?style=flat-square - :target: https://discord.gg/rTQm7UZ - -.. image:: https://snyk.io/test/github/RedisAI/redisai-py/badge.svg?targetFile=pyproject.toml)](https://snyk.io/test/github/RedisAI/redisai-py?targetFile=pyproject.toml + :target: https://discord.gg/rTQm7UZ redisai-py is the Python client for RedisAI. Checkout the `documentation `_ for API details and examples @@ -49,17 +47,6 @@ Installation $ pip install ml2rt -Development ------------ - -1. Create a virtualenv to manage your python dependencies, and ensure it's active. - ```virtualenv -v venv``` -2. Install [pypoetry](https://python-poetry.org/) to manage your dependencies. - ```pip install --user poetry``` -3. Install dependencies. - ```poetry install``` - -[tox](https://tox.readthedocs.io/en/latest/) runs all tests as its default target. Running *tox* by itself will run unit tests. Ensure you have a running redis, with the module loaded. `RedisAI example repo `_ shows few examples made using redisai-py under `python_client` folder. Also, checkout diff --git a/pyproject.toml b/pyproject.toml deleted file mode 100644 index e2cfe99..0000000 --- a/pyproject.toml +++ /dev/null @@ -1,52 +0,0 @@ -[tool.poetry] -name = "redisai" -version = "1.0.2" -description = "RedisAI Python Client" -authors = ["RedisLabs "] -license = "BSD-3-Clause" -readme = "README.rst" - -packages = [ - { include = 'redisai' }, -] - -classifiers = [ - 'Topic :: Database', - 'Programming Language :: Python', - 'Intended Audience :: Developers', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - 'License :: OSI Approved :: BSD License', - 'Development Status :: 5 - Production/Stable' -] - -[tool.poetry.dependencies] -python = "^3.6" -redis = ">=2.10" -hiredis = ">=0.20" -numpy = ">=1.19.5" -six = ">=1.10.0" -Deprecated = "^1.2.12" - -[tool.poetry.dev-dependencies] -codecov = "^2.1.11" -flake8 = "^3.9.2" -rmtest = "^0.7.0" -nose = "^1.3.7" -ml2rt = "^0.2.0" -tox = ">=3.23.1" -tox-poetry = "^0.3.0" -bandit = "^1.7.0" -pylint = "^2.8.2" -vulture = "^2.3" - -[tool.poetry.urls] -url = "https://redisai.io" -repository = "https://github.com/RedisAI/redisai-py" - - -[build-system] -requires = ["poetry-core>=1.0.0"] -build-backend = "poetry.core.masonry.api" diff --git a/redisai/dag.py b/redisai/dag.py index 5b77c5b..1b60529 100644 --- a/redisai/dag.py +++ b/redisai/dag.py @@ -1,16 +1,17 @@ from functools import partial -from typing import Any, AnyStr, List, Sequence, Union +from typing import AnyStr, Union, Sequence, Any, List import numpy as np -from redisai import command_builder as builder from redisai.postprocessor import Processor +from redisai import command_builder as builder + processor = Processor() class Dag: - def __init__(self, load, persist, executor, readonly=False): + def __init__(self, load, persist, executor, readonly=False, postprocess=True): self.result_processors = [] self.enable_postprocess = True if readonly: diff --git a/redisai/pipeline.py b/redisai/pipeline.py index 53d28b2..447f528 100644 --- a/redisai/pipeline.py +++ b/redisai/pipeline.py @@ -1,12 +1,14 @@ +import warnings from functools import partial -from typing import AnyStr, Sequence, Union +from typing import AnyStr, Union, Sequence import numpy as np -import redis from redisai import command_builder as builder +import redis from redisai.postprocessor import Processor + processor = Processor() diff --git 
a/redisai/postprocessor.py b/redisai/postprocessor.py index 42bd141..6cd599b 100644 --- a/redisai/postprocessor.py +++ b/redisai/postprocessor.py @@ -63,15 +63,14 @@ def infoget(res): decoder = staticmethod(decoder) decoding_functions = ( "loadbackend", - "modelstore", "modelset", "modeldel", - "modelexecute", "modelrun", "tensorset", "scriptset", "scriptdel", "scriptrun", + "scriptexecute" "inforeset", ) for fn in decoding_functions: diff --git a/redisai/utils.py b/redisai/utils.py index ca8007f..3723bc5 100644 --- a/redisai/utils.py +++ b/redisai/utils.py @@ -1,7 +1,7 @@ -from typing import AnyStr, ByteString, Callable, List, Sequence, Union - +from typing import Union, ByteString, Sequence, List, AnyStr, Callable import numpy as np + dtype_dict = { "float": "FLOAT", "double": "DOUBLE", diff --git a/tox.ini b/tox.ini deleted file mode 100644 index a93d9a6..0000000 --- a/tox.ini +++ /dev/null @@ -1,22 +0,0 @@ -[tox] -skipsdist = true -#envlist = linters - -[flake8] -max-complexity = 10 -ignore = E501,C901 -srcdir = ./redisai -exclude =.git,.tox,dist,doc,*/__pycache__/* - -[testenv:nosetests] -whitelist_externals = find -commands_pre = - find . -type f -name "*.pyc" -delete -commands = - nosetests -vsx test - -[testenv:linters] -commands = - flake8 --show-source - vulture redisai --min-confidence 80 - bandit redisai/** From d17b2788d7aa10cffaea1f2195100efc15d73fca Mon Sep 17 00:00:00 2001 From: AvitalFineRedis Date: Thu, 20 May 2021 13:18:57 +0300 Subject: [PATCH 18/34] Some PR fixes --- redisai/client.py | 39 ++++--- redisai/command_builder.py | 29 +++-- redisai/postprocessor.py | 2 +- test/test.py | 214 +++++++++++++------------------------ 4 files changed, 117 insertions(+), 167 deletions(-) diff --git a/redisai/client.py b/redisai/client.py index e7adcd8..c7b7239 100644 --- a/redisai/client.py +++ b/redisai/client.py @@ -622,6 +622,7 @@ def scriptdel(self, key: AnyStr) -> str: res = self.execute_command(*args) return res if not self.enable_postprocess else processor.scriptdel(res) + @deprecated(version="1.2.0", reason="Use scriptexecute instead") def scriptrun( self, key: AnyStr, @@ -642,7 +643,7 @@ def scriptrun( Tensor(s) which is already saved in the RedisAI using a tensorset call. These tensors will be used as the input for the modelrun outputs : Union[AnyStr, List[AnyStr]] - keys on which the outputs to be saved. If those keys exist already, modelrun + keys on which the outputs to be saved. If those keys exist already, scriptrun will overwrite them with new values Returns @@ -662,37 +663,38 @@ def scriptrun( def scriptexecute( self, key: AnyStr, - function: AnyStr, + function: str, keys: Union[AnyStr, Sequence[AnyStr]], - inputs: Union[AnyStr, Sequence[AnyStr]] = None, - list_inputs: Sequence[Sequence[AnyStr]] = None, + inputs: Union[AnyStr, Sequence[Union[AnyStr, Sequence[AnyStr]]]] = None, outputs: Union[AnyStr, Sequence[AnyStr]] = None, timeout: int = None, ) -> str: """ - Run an already set script. Similar to modelrun + Run an already set script. Similar to modelexecute Parameters ---------- key : AnyStr Script key - function : AnyStr + function : str Name of the function in the ``script`` keys : Union[AnyStr, Sequence[AnyStr]] Either a squence of key names that the script will access before, during and after its execution, or a tag which all those keys share. - inputs : Union[AnyStr, List[AnyStr]] - Tensor(s) which is already saved in the RedisAI using a tensorset call. 
These - tensors will be used as the input for the modelrun - list_inputs : Sequence[Sequence[AnyStr]] - list of inputs. + inputs : Union[AnyStr, Sequence[Union[AnyStr, Sequence[AnyStr]]]] + The inputs can be tensor key name, string , int or float. + These inputs will be used as the input for the scriptexecute function. + The order of the input should be aligned with the order of their respected + parameter at the function signature. + When one of the elements in the inputs list is a list (or a tuple), that element + will be treated as LIST_INPUTS in the command executaion. outputs : Union[AnyStr, List[AnyStr]] - keys on which the outputs to be saved. If those keys exist already, modelrun - will overwrite them with new values + keys on which the outputs to be saved. If those keys exist already, scriptexecute + will overwrite them with new values. timeout : int The max number on milisecinds that may pass before the request is prossced (meaning that the result will not be computed after that time and TIMEDOUT - is returned in that case) + is returned in that case). Returns ------- @@ -703,10 +705,15 @@ def scriptexecute( ------- >>> con.scriptexecute('ket', 'bar', keys=['a', 'b', 'c'], inputs=['a', 'b'], outputs=['c']) 'OK' + >>> con.scriptexecute('myscript{tag}', 'addn', + >>> keys=['{tag}'], + >>> inputs=['mytensor1{tag}', ['mytensor2{tag}', 'mytensor3{tag}']], + >>> outputs=['result{tag}']) + 'OK' """ - args = builder.scriptexecute(key, function, keys, inputs, list_inputs, outputs, timeout) + args = builder.scriptexecute(key, function, keys, inputs, outputs, timeout) res = self.execute_command(*args) - return res if not self.enable_postprocess else processor.scriptrun(res) + return res if not self.enable_postprocess else processor.scriptexecute(res) def scriptscan(self) -> List[List[AnyStr]]: """ diff --git a/redisai/command_builder.py b/redisai/command_builder.py index 4dd118e..785c7b8 100644 --- a/redisai/command_builder.py +++ b/redisai/command_builder.py @@ -228,10 +228,12 @@ def scriptdel(name: AnyStr) -> Sequence: def scriptrun( name: AnyStr, - function: AnyStr, + function: str, inputs: Union[AnyStr, Sequence[AnyStr]], outputs: Union[AnyStr, Sequence[AnyStr]], ) -> Sequence: + if name is None or function is None: + raise ValueError("Missing required arguments for script run command") args = ( "AI.SCRIPTRUN", name, @@ -245,13 +247,14 @@ def scriptrun( def scriptexecute( name: AnyStr, - function: AnyStr, + function: str, keys: Union[AnyStr, Sequence[AnyStr]], - inputs: Union[AnyStr, Sequence[AnyStr]], - list_inputs: Sequence[Sequence[AnyStr]], + inputs: Union[AnyStr, Sequence[Union[AnyStr, Sequence[AnyStr]]]], outputs: Union[AnyStr, Sequence[AnyStr]], timeout: int, ) -> Sequence: + if name is None or function is None or keys is None: + raise ValueError("Missing required arguments for script execute command") args = [ "AI.SCRIPTEXECUTE", name, @@ -262,10 +265,20 @@ def scriptexecute( ] if inputs is not None: - args += ["INPUTS", len(utils.listify(inputs)), *utils.listify(inputs)] - if list_inputs is not None: - for li in list_inputs: - args += ["LIST_INPUTS", len(li), *li] + temp_inputs = [] + if not isinstance(inputs, (list, tuple)): + args += ["INPUTS", 1, inputs] + else: + for elem in inputs: + if isinstance(elem, (list, tuple)): + if temp_inputs: + args += ["INPUTS", len(temp_inputs), *temp_inputs] + temp_inputs = [] + args += ["LIST_INPUTS", len(utils.listify(elem)), *utils.listify(elem)] + else: + temp_inputs.append(elem) + if temp_inputs: + args += ["INPUTS", len(temp_inputs), 
*temp_inputs] if outputs is not None: args += ["OUTPUTS", len(utils.listify(outputs)), *utils.listify(outputs)] if timeout is not None: diff --git a/redisai/postprocessor.py b/redisai/postprocessor.py index 6cd599b..1ddc3d2 100644 --- a/redisai/postprocessor.py +++ b/redisai/postprocessor.py @@ -70,7 +70,7 @@ def infoget(res): "scriptset", "scriptdel", "scriptrun", - "scriptexecute" + "scriptexecute", "inforeset", ) for fn in decoding_functions: diff --git a/test/test.py b/test/test.py index e8e7b71..084c9f6 100644 --- a/test/test.py +++ b/test/test.py @@ -36,6 +36,27 @@ def bar_variadic(a, args : List[Tensor]): def bar_two_lists(a: List[Tensor], b:List[Tensor]): return a[0] + b[0] + +def addn(a, args : List[Tensor]): + return a + torch.stack(args).sum() +""" + +script_with_redis_commands = r""" +def redis_string_int_to_tensor(redis_value: Any): + return torch.tensor(int(str(redis_value))) + +def int_set_get(key:str, value:int): + redis.execute("SET", key, str(value)) + res = redis.execute("GET", key) + return redis_string_int_to_tensor(res) + +def func(a: Tensor, b: int, c: List[Tensor], d:str, e:List[float]): + redis.execute("SET", "key{1}", str(b)) + redis.execute("SET", d, str(torch.stack(c).sum().data[0])) + res = redis.execute("GET", d) + temp = redis_string_int_to_tensor(res) + redis.execute("DEL", d) + return torch.cat([a, temp, torch.tensor(e)], 0) """ @@ -354,39 +375,31 @@ def test_run_tf_model(self): con.modeldel("m") self.assertRaises(ResponseError, con.modelget, "m") - def test_scripts_run(self): + # AI.SCRIPTRUN is deprecated by AI.SCRIPTEXECUTE + def test_deprecated_script_run(self): con = self.get_client() self.assertRaises(ResponseError, con.scriptset, "ket", "cpu", "return 1") con.scriptset("ket", "cpu", script) con.tensorset("a", (2, 3), dtype="float") con.tensorset("b", (2, 3), dtype="float") - # try with bad arguments: - self.assertRaises( - ResponseError, con.scriptrun, "ket", "bar", inputs=["a"], outputs=["c"] - ) con.scriptrun("ket", "bar", inputs=["a", "b"], outputs=["c"]) tensor = con.tensorget("c", as_numpy=False) self.assertEqual([4, 6], tensor["values"]) - script_det = con.scriptget("ket") - self.assertTrue(script_det["device"] == "cpu") - self.assertTrue(script_det["source"] == script) - script_det = con.scriptget("ket", meta_only=True) - self.assertTrue(script_det["device"] == "cpu") - self.assertNotIn("source", script_det) - con.scriptdel("ket") - self.assertRaises(ResponseError, con.scriptget, "ket") - def test_scripts_execute_basic(self): + def test_scripts_execute(self): con = self.get_client() self.assertRaises(ResponseError, con.scriptset, "ket", "cpu", "return 1") con.scriptset("ket", "cpu", script) con.tensorset("a", (2, 3), dtype="float") con.tensorset("b", (2, 3), dtype="float") + # try with bad arguments: - self.assertRaises( - ResponseError, con.scriptexecute, "ket", "bar", keys=["a", "c"], inputs=["a"], outputs=["c"] - ) + with self.assertRaises(ValueError) as e: + con.scriptexecute("ket", function=None, keys=None) + self.assertEqual(str(e.exception), "Missing required arguments for script execute command") + self.assertRaises(ResponseError, con.scriptexecute, "ket", "bar", keys=["a", "c"], inputs=["a"], outputs=["c"]) + con.scriptexecute("ket", "bar", keys=["a", "b", "c"], inputs=["a", "b"], outputs=["c"]) tensor = con.tensorget("c", as_numpy=False) self.assertEqual([4, 6], tensor["values"]) @@ -399,27 +412,19 @@ def test_scripts_execute_basic(self): con.scriptdel("ket") self.assertRaises(ResponseError, con.scriptget, "ket") - def 
test_scripts_execute_advanced(self): - con = self.get_client() con.scriptset("myscript{1}", "cpu", script, "version1") con.tensorset("a{1}", [2, 3, 2, 3], shape=(2, 2), dtype="float") con.tensorset("b{1}", [2, 3, 2, 3], shape=(2, 2), dtype="float") + con.scriptexecute("myscript{1}", "bar", keys=["{1}"], inputs=["a{1}", "b{1}"], outputs=["c{1}"]) + values = con.tensorget("c{1}", as_numpy=False) + self.assertTrue(np.allclose(values["values"], [4.0, 6.0, 4.0, 6.0])) - for _ in range(0, 100): - con.scriptexecute("myscript{1}", "bar", keys=["{1}"], inputs=["a{1}", "b{1}"], outputs=["c{1}"]) - - info = con.infoget('myscript{1}') - self.assertEqual(info['key'], 'myscript{1}') - self.assertEqual(info['type'], 'SCRIPT') - self.assertEqual(info['backend'], 'TORCH') - self.assertEqual(info['tag'], 'version1') - self.assertTrue(info['duration'] > 0) - self.assertEqual(info['samples'], -1) - self.assertEqual(info['calls'], 100) - self.assertEqual(info['errors'], 0) - - values = con.tensorget("c{1}", as_numpy=False)['values'] - self.assertEqual(values, [4.0, 6.0, 4.0, 6.0]) + def test_scripts_commands_support(self): + con = self.get_client() + con.scriptset("myscript{1}", "cpu", script_with_redis_commands) + con.scriptexecute("myscript{1}", "int_set_get", keys=["{1}"], inputs=["x{1}", 3], outputs=["y{1}"]) + values = con.tensorget("y{1}", as_numpy=False) + self.assertTrue(np.allclose(values["values"], [3])) def test_scripts_execute_list_input(self): con = self.get_client() @@ -428,123 +433,48 @@ def test_scripts_execute_list_input(self): con.tensorset("b1{$}", [2, 3, 2, 3], shape=(2, 2), dtype="float") con.tensorset("b2{$}", [2, 3, 2, 3], shape=(2, 2), dtype="float") - for _ in range(0, 100): - con.scriptexecute("myscript{$}", 'bar_variadic', - keys=["{$}"], - inputs=["a{$}"], - list_inputs=[["b1{$}", "b2{$}"]], - outputs=["c{$}"]) - - info = con.infoget('myscript{$}') - - self.assertEqual(info['key'], 'myscript{$}') - self.assertEqual(info['type'], 'SCRIPT') - self.assertEqual(info['backend'], 'TORCH') - self.assertEqual(info['tag'], 'version1') - self.assertTrue(info['duration'] > 0) - self.assertEqual(info['samples'], -1) - self.assertEqual(info['calls'], 100) - self.assertEqual(info['errors'], 0) + con.scriptexecute("myscript{$}", 'bar_variadic', + keys=["{$}"], + inputs=["a{$}", ["b1{$}", "b2{$}"]], + outputs=["c{$}"]) values = con.tensorget("c{$}", as_numpy=False)['values'] self.assertEqual(values, [4.0, 6.0, 4.0, 6.0]) - def test_scripts_execute_multiple_list_input(self): - con = self.get_client() - con.scriptset("myscript{$}", "cpu", script, "version1") - con.tensorset("a{$}", [2, 3, 2, 3], shape=(2, 2), dtype="float") - con.tensorset("b{$}", [2, 3, 2, 3], shape=(2, 2), dtype="float") - - for _ in range(0, 100): - con.scriptexecute('myscript{$}', 'bar_two_lists', - keys=["{$}"], - list_inputs=[["a{$}"], ["b{$}"]], - outputs=["c{$}"]) - - info = con.infoget('myscript{$}') - - self.assertEqual(info['key'], 'myscript{$}') - self.assertEqual(info['type'], 'SCRIPT') - self.assertEqual(info['backend'], 'TORCH') - self.assertEqual(info['tag'], 'version1') - self.assertTrue(info['duration'] > 0) - self.assertEqual(info['samples'], -1) - self.assertEqual(info['calls'], 100) - self.assertEqual(info['errors'], 0) - - values = con.tensorget('c{$}', as_numpy=False)['values'] - self.assertEqual(values, [4.0, 6.0, 4.0, 6.0]) - - def test_scripts_execute_errors(self): - con = self.get_client() - con.scriptset("ket{1}", "cpu", script, tag="version1") - con.tensorset("a{1}", [2, 3, 2, 3], shape=(2, 2), 
dtype="float") - con.tensorset("b{1}", [2, 3, 2, 3], shape=(2, 2), dtype="float") - - con.delete("EMPTY{1}") - # ERR no script at key from SCRIPTGET - self.assertRaises(ResponseError, con.scriptget, "EMPTY{1}") - - con.set('NOT_SCRIPT{1}', 'BAR') - # ERR wrong type from SCRIPTGET - self.assertRaises(ResponseError, con.scriptget, 'NOT_SCRIPT{1}') - - con.delete('EMPTY{1}') - # ERR no script at key from SCRIPTEXECUTE - self.assertRaises(ResponseError, con.scriptexecute, 'EMPTY{1}', 'bar', - keys=['{1}'], inputs=['b{1}'], outputs=['c{1}']) - - con.set('NOT_SCRIPT{1}', 'BAR') - # ERR wrong type from SCRIPTEXECUTE - self.assertRaises(ResponseError, con.scriptexecute, 'NOT_SCRIPT{1}', 'bar', - keys=['{1}'], inputs=['b{1}'], outputs=['c{1}']) - - con.delete('EMPTY{1}') - # ERR Input key is empty - self.assertRaises(ResponseError, con.scriptexecute, 'ket{1}', 'bar', - keys=['{1}'], inputs=['EMPTY{1}', 'b{1}'], outputs=['c{1}']) + con.scriptexecute('myscript{$}', 'bar_two_lists', + keys=["{$}"], + inputs=[["a{$}"], ["b1{$}"]], + outputs=["c{$}"]) - con.set('NOT_TENSOR{1}', 'BAR') - # ERR Input key not tensor - self.assertRaises(ResponseError, con.scriptexecute, 'ket{1}', 'bar', - keys=['{1}'], inputs=['NOT_TENSOR{1}', 'b{1}'], outputs=['c{1}']) - - self.assertRaises(ResponseError, con.scriptexecute, 'ket{1}', 'bar', - keys=['{1}'], inputs=['b{1}'], outputs=['c{1}']) - - self.assertRaises(ResponseError, con.scriptexecute, 'ket{1}', 'bar', keys=['{1}'], inputs=['b{1}'], outputs=[]) + values = con.tensorget("c{$}", as_numpy=False)['values'] + self.assertEqual(values, [4.0, 6.0, 4.0, 6.0]) - self.assertRaises(ResponseError, con.scriptexecute, 'ket{1}', 'bar', keys=['{1}'], inputs=[], outputs=[]) + con.tensorset("mytensor1{$}", [40], dtype="float") + con.tensorset("mytensor2{$}", [1], dtype="float") + con.tensorset("mytensor3{$}", [1], dtype="float") + con.scriptexecute("myscript{$}", "addn", + keys=["{$}"], + inputs=["mytensor1{$}", ["mytensor2{$}", "mytensor3{$}"]], + outputs=["result{$}"]) - self.assertRaises(ResponseError, con.scriptexecute, 'ket{1}', 'bar', keys=[], inputs=[], outputs=[]) + values = con.tensorget("result{$}", as_numpy=False) + self.assertTrue(np.allclose(values["values"], [42])) - def test_scripts_execute_variadic_errors(self): + """ + def test_scripts_execute_multiple_list_input(self): con = self.get_client() - con.scriptset("ket{$}", "cpu", script, tag="version1") - con.tensorset("a{$}", [2, 3, 2, 3], shape=(2, 2), dtype="float") - con.tensorset("b{$}", [2, 3, 2, 3], shape=(2, 2), dtype="float") - - con.delete('EMPTY{$}') - # ERR Variadic input key is empty - self.assertRaises(ResponseError, con.scriptexecute, 'ket{$}', 'bar_variadic', - keys=['{$}'], inputs=['a{$}'], list_inputs=[['EMPTY{$}', 'b{$}']], outputs=['c{$}']) - - con.set('NOT_TENSOR{$}', 'BAR') - # ERR Variadic input key not tensor - self.assertRaises(ResponseError, con.scriptexecute, 'ket{$}', 'bar_variadic', - keys=['{$}'], inputs=['a{$}'], list_inputs=[['NOT_TENSOR{$}', 'b{$}']], outputs=['c{$}']) - - self.assertRaises(ResponseError, con.scriptexecute, 'ket{$}', 'bar_variadic', - keys=['{$}'], inputs=['b{$}', '${$}'], outputs=['c{$}']) - - self.assertRaises(ResponseError, con.scriptexecute, 'ket{$}', 'bar_variadic', - keys=['{$}'], inputs=['b{$}'], list_inputs=[[]], outputs=[]) - - self.assertRaises(ResponseError, con.scriptexecute, 'ket{$}', 'bar_variadic', - keys=['{$}'], inputs=[], list_inputs=[[]], outputs=[]) - - self.assertRaises(ResponseError, con.scriptexecute, 'ket{$}', 'bar_variadic', - keys=['{$}'], 
list_inputs=[['a{$}'], ['b{$}']], outputs=[]) + con.scriptset("myscript{1}", "cpu", script_with_redis_commands) + con.tensorset("mytensor1{1}", [40], dtype="float") + con.tensorset("mytensor2{1}", [10], dtype="float") + con.tensorset("mytensor3{1}", [1], dtype="float") + con.scriptexecute("myscript{1}", "func", + keys=["{1}"], + inputs=["mytensor3{1}", 3, ["mytensor1{1}", "mytensor2{1}", "mytensor3{1}]"], "alon", [1.25, 4.2]], + outputs=["my_output{1}"]) + + values = con.tensorget("result{$}", as_numpy=False) + self.assertTrue(np.allclose(values["values"], [1, 51, 1.25, 4.2])) + """ def test_run_onnxml_model(self): mlmodel_path = os.path.join(MODEL_DIR, "boston.onnx") From cabbdc30c8273c5f145003dd56efdf4b063863dd Mon Sep 17 00:00:00 2001 From: AvitalFineRedis Date: Thu, 20 May 2021 17:35:59 +0300 Subject: [PATCH 19/34] new files --- setup.py | 32 ++++++++++++++++++++++++++++++++ test-requirements.txt | 8 ++++++++ 2 files changed, 40 insertions(+) create mode 100644 setup.py create mode 100644 test-requirements.txt diff --git a/setup.py b/setup.py new file mode 100644 index 0000000..6b3dfd3 --- /dev/null +++ b/setup.py @@ -0,0 +1,32 @@ +#!/usr/bin/env python +from setuptools import setup, find_packages + + +with open("README.rst") as f: + long_description = f.read() + +setup( + name="redisai", + version="1.0.2", + description="RedisAI Python Client", + long_description=long_description, + long_description_content_type="text/x-rst", + url="http://github.com/RedisAI/redisai-py", + author="RedisLabs", + author_email="oss@redislabs.com", + packages=find_packages(), + install_requires=["redis", "hiredis", "numpy"], + python_requires=">=3.6", + classifiers=[ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "License :: OSI Approved :: BSD License", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Topic :: Database", + ], +) diff --git a/test-requirements.txt b/test-requirements.txt new file mode 100644 index 0000000..38dac3d --- /dev/null +++ b/test-requirements.txt @@ -0,0 +1,8 @@ +hiredis>=0.2.0 +redis>=2.10 +rmtest>=0.2 +six>=1.10.0 +nose +codecov +numpy +ml2rt From 1f5011ba7c41e8e394bcb9acc091785f0f4ef65a Mon Sep 17 00:00:00 2001 From: Avital Fine <79420960+AvitalFineRedis@users.noreply.github.com> Date: Thu, 20 May 2021 17:42:34 +0300 Subject: [PATCH 20/34] Update test-requirements.txt --- test-requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/test-requirements.txt b/test-requirements.txt index 38dac3d..709e733 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -6,3 +6,4 @@ nose codecov numpy ml2rt +deprecated From 4a2b3a0d3748de357b1af063dc84a9aa76ae563d Mon Sep 17 00:00:00 2001 From: Avital Fine <79420960+AvitalFineRedis@users.noreply.github.com> Date: Thu, 20 May 2021 17:48:38 +0300 Subject: [PATCH 21/34] Update postprocessor.py --- redisai/postprocessor.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/redisai/postprocessor.py b/redisai/postprocessor.py index 1ddc3d2..4772e3c 100644 --- a/redisai/postprocessor.py +++ b/redisai/postprocessor.py @@ -63,8 +63,10 @@ def infoget(res): decoder = staticmethod(decoder) decoding_functions = ( "loadbackend", + "modelstore", "modelset", "modeldel", + "modelexecute", "modelrun", "tensorset", "scriptset", From a2e4b655c926007b8dcdc8893d2f65a138c4837c Mon Sep 17 00:00:00 2001 
From: Avital Fine <79420960+AvitalFineRedis@users.noreply.github.com> Date: Thu, 20 May 2021 17:49:30 +0300 Subject: [PATCH 22/34] Update setup.py --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 6b3dfd3..f5de6bb 100644 --- a/setup.py +++ b/setup.py @@ -15,7 +15,7 @@ author="RedisLabs", author_email="oss@redislabs.com", packages=find_packages(), - install_requires=["redis", "hiredis", "numpy"], + install_requires=["redis", "hiredis", "numpy", "deprecated"], python_requires=">=3.6", classifiers=[ "Development Status :: 4 - Beta", From 24537c876776162897e03823912b63763ced57c0 Mon Sep 17 00:00:00 2001 From: Avital Fine <79420960+AvitalFineRedis@users.noreply.github.com> Date: Sun, 23 May 2021 09:34:58 +0300 Subject: [PATCH 23/34] Update test.py --- test/test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/test.py b/test/test.py index 084c9f6..7647d82 100644 --- a/test/test.py +++ b/test/test.py @@ -469,7 +469,7 @@ def test_scripts_execute_multiple_list_input(self): con.tensorset("mytensor3{1}", [1], dtype="float") con.scriptexecute("myscript{1}", "func", keys=["{1}"], - inputs=["mytensor3{1}", 3, ["mytensor1{1}", "mytensor2{1}", "mytensor3{1}]"], "alon", [1.25, 4.2]], + inputs=["mytensor3{1}", 3, ["mytensor1{1}", "mytensor2{1}", "mytensor3{1}]"], "test", [1.25, 4.2]], outputs=["my_output{1}"]) values = con.tensorget("result{$}", as_numpy=False) From 53b1106e7ed556293875edd57e05c94019c74fe2 Mon Sep 17 00:00:00 2001 From: Avital Fine <79420960+AvitalFineRedis@users.noreply.github.com> Date: Tue, 25 May 2021 09:11:03 +0300 Subject: [PATCH 24/34] Update client.py Change the function variable in scriptrun function from AnyStr to str --- redisai/client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/redisai/client.py b/redisai/client.py index c7b7239..b35ebe7 100644 --- a/redisai/client.py +++ b/redisai/client.py @@ -626,7 +626,7 @@ def scriptdel(self, key: AnyStr) -> str: def scriptrun( self, key: AnyStr, - function: AnyStr, + function: str, inputs: Union[AnyStr, Sequence[AnyStr]], outputs: Union[AnyStr, Sequence[AnyStr]], ) -> str: @@ -637,7 +637,7 @@ def scriptrun( ---------- key : AnyStr Script key - function : AnyStr + function : str Name of the function in the ``script`` inputs : Union[AnyStr, List[AnyStr]] Tensor(s) which is already saved in the RedisAI using a tensorset call. These From 2a60e5795fec37ca83e01d98eb7423e06a55ff55 Mon Sep 17 00:00:00 2001 From: AvitalFineRedis Date: Wed, 7 Jul 2021 10:26:31 +0300 Subject: [PATCH 25/34] add scriptstore, and update scriptexecute & tests --- redisai/client.py | 65 +++++++++++++++++--- redisai/command_builder.py | 33 +++++----- redisai/postprocessor.py | 1 + test/test.py | 123 +++++++++++++++++-------------------- 4 files changed, 128 insertions(+), 94 deletions(-) diff --git a/redisai/client.py b/redisai/client.py index b35ebe7..27f39f9 100644 --- a/redisai/client.py +++ b/redisai/client.py @@ -528,6 +528,50 @@ def tensorget( else processor.tensorget(res, as_numpy, as_numpy_mutable, meta_only) ) + def scriptstore( + self, key: AnyStr, device: str, script: str, entry_points: Union[str, Sequence[str]] , tag: AnyStr = None + ) -> str: + """ + Set the script to RedisAI. Action similar to scriptset. The difference is that in + scriptstore you must specify entry points for your script. RedisAI uses the TorchScript + engine to execute the script. So the script should have only TorchScript supported + constructs. 
That being said, it's important to mention that using redisai script + to do post processing or pre processing for a Tensorflow (or any other backend) + is completely valid. For more details about TorchScript and supported ops, + checkout TorchScript documentation. + + Parameters + ---------- + key : AnyStr + Script key at the server + device : str + Device name. Allowed devices are CPU and GPU. If multiple GPUs are available. + it can be specified using the format GPU:. For example: GPU:0 + script : str + Script itself, as a Python string + entry_points : Union[str, Sequence[str]] + A list of entry points to be used in the script. Each entry point should have + the signature of 'def entry_point(tensors: List[Tensor], keys: List[str], args: List[str])' + tag : AnyStr + Any string that will be saved in RedisAI as tag for the model + + Returns + ------- + str + 'OK' if success, raise an exception otherwise + + Note + ---- + Even though ``script`` is pure Python code, it's a subset of Python language and not + all the Python operations are supported. For more details, checkout TorchScript + documentation. It's also important to note that that the script is executed on a high + performance C++ runtime instead of the Python interpreter. And hence ``script`` should + not have any import statements (A common mistake people make all the time) + """ + args = builder.scriptstore(key, device, script, entry_points, tag) + res = self.execute_command(*args) + return res if not self.enable_postprocess else processor.scriptstore(res) + def scriptset( self, key: AnyStr, device: str, script: str, tag: AnyStr = None ) -> str: @@ -664,8 +708,9 @@ def scriptexecute( self, key: AnyStr, function: str, - keys: Union[AnyStr, Sequence[AnyStr]], - inputs: Union[AnyStr, Sequence[Union[AnyStr, Sequence[AnyStr]]]] = None, + keys: Union[AnyStr, Sequence[AnyStr]] = None, + inputs: Union[AnyStr, Sequence[AnyStr]] = None, + input_args: Union[AnyStr, Sequence[AnyStr]] = None, outputs: Union[AnyStr, Sequence[AnyStr]] = None, timeout: int = None, ) -> str: @@ -681,13 +726,13 @@ def scriptexecute( keys : Union[AnyStr, Sequence[AnyStr]] Either a squence of key names that the script will access before, during and after its execution, or a tag which all those keys share. - inputs : Union[AnyStr, Sequence[Union[AnyStr, Sequence[AnyStr]]]] - The inputs can be tensor key name, string , int or float. - These inputs will be used as the input for the scriptexecute function. - The order of the input should be aligned with the order of their respected - parameter at the function signature. - When one of the elements in the inputs list is a list (or a tuple), that element - will be treated as LIST_INPUTS in the command executaion. + These keys will be used as the 'keys' for the scriptexecute function. + inputs : Union[AnyStr, Sequence[AnyStr]] + keys or inputs must be provided (or both). + These inputs will be used as the 'tensors' for the scriptexecute function. + input_args : Union[AnyStr, Sequence[AnyStr]] + These inputs will be used as the 'args' for the scriptexecute function. + They can be integers, floats or strings. outputs : Union[AnyStr, List[AnyStr]] keys on which the outputs to be saved. If those keys exist already, scriptexecute will overwrite them with new values. 
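        # --- Editor's note: illustrative sketch, not part of the original patch. ---
        # Hypothetical usage of the scriptexecute signature documented above; the key
        # names and the 'int_set_get' entry point mirror the test_scripts_redis_commands
        # test added in this patch and are assumptions for the example.
        # >>> con.scriptexecute('myscript{tag}', 'int_set_get',
        # ...                   keys=['{tag}'],
        # ...                   input_args=['x{tag}', '3'],
        # ...                   outputs=['y{tag}'])
        # 'OK'
        # The command builder changed in this patch assembles that call into roughly:
        #   AI.SCRIPTEXECUTE myscript{tag} int_set_get KEYS 1 {tag} ARGS 2 x{tag} 3 OUTPUTS 1 y{tag}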
@@ -711,7 +756,7 @@ def scriptexecute( >>> outputs=['result{tag}']) 'OK' """ - args = builder.scriptexecute(key, function, keys, inputs, outputs, timeout) + args = builder.scriptexecute(key, function, keys, inputs, input_args, outputs, timeout) res = self.execute_command(*args) return res if not self.enable_postprocess else processor.scriptexecute(res) diff --git a/redisai/command_builder.py b/redisai/command_builder.py index 785c7b8..cdf9ea8 100644 --- a/redisai/command_builder.py +++ b/redisai/command_builder.py @@ -203,6 +203,17 @@ def tensorget(key: AnyStr, as_numpy: bool = True, meta_only: bool = False) -> Se args.append("VALUES") return args +def scriptstore(name: AnyStr, device: str, script: str, entry_points: Union[str, Sequence[str]], tag: AnyStr = None)\ + -> Sequence: + if device.upper() not in utils.allowed_devices: + raise ValueError(f"Device not allowed. Use any from {utils.allowed_devices}") + args = ["AI.SCRIPTSTORE", name, device] + if tag: + args += ["TAG", tag] + args += ["ENTRY_POINTS", len(utils.listify(entry_points)), *utils.listify(entry_points)] + args.append("SOURCE") + args.append(script) + return args def scriptset(name: AnyStr, device: str, script: str, tag: AnyStr = None) -> Sequence: if device.upper() not in utils.allowed_devices: @@ -249,11 +260,12 @@ def scriptexecute( name: AnyStr, function: str, keys: Union[AnyStr, Sequence[AnyStr]], - inputs: Union[AnyStr, Sequence[Union[AnyStr, Sequence[AnyStr]]]], + inputs: Union[AnyStr, Sequence[AnyStr]], + input_args: Union[AnyStr, Sequence[AnyStr]], outputs: Union[AnyStr, Sequence[AnyStr]], timeout: int, ) -> Sequence: - if name is None or function is None or keys is None: + if name is None or function is None or (keys is None and inputs is None): raise ValueError("Missing required arguments for script execute command") args = [ "AI.SCRIPTEXECUTE", @@ -265,20 +277,9 @@ def scriptexecute( ] if inputs is not None: - temp_inputs = [] - if not isinstance(inputs, (list, tuple)): - args += ["INPUTS", 1, inputs] - else: - for elem in inputs: - if isinstance(elem, (list, tuple)): - if temp_inputs: - args += ["INPUTS", len(temp_inputs), *temp_inputs] - temp_inputs = [] - args += ["LIST_INPUTS", len(utils.listify(elem)), *utils.listify(elem)] - else: - temp_inputs.append(elem) - if temp_inputs: - args += ["INPUTS", len(temp_inputs), *temp_inputs] + args += ["INPUTS", len(utils.listify(inputs)), *utils.listify(inputs)] + if input_args is not None: + args += ["ARGS", len(utils.listify(input_args)), *utils.listify(input_args)] if outputs is not None: args += ["OUTPUTS", len(utils.listify(outputs)), *utils.listify(outputs)] if timeout is not None: diff --git a/redisai/postprocessor.py b/redisai/postprocessor.py index 4772e3c..ae93fab 100644 --- a/redisai/postprocessor.py +++ b/redisai/postprocessor.py @@ -70,6 +70,7 @@ def infoget(res): "modelrun", "tensorset", "scriptset", + "scriptstore", "scriptdel", "scriptrun", "scriptexecute", diff --git a/test/test.py b/test/test.py index 7647d82..d430b0d 100644 --- a/test/test.py +++ b/test/test.py @@ -27,36 +27,43 @@ def __exit__(self, *args): MODEL_DIR = os.path.dirname(os.path.abspath(__file__)) + "/testdata" -script = r""" +script_old = r""" def bar(a, b): return a + b def bar_variadic(a, args : List[Tensor]): return args[0] + args[1] - -def bar_two_lists(a: List[Tensor], b:List[Tensor]): - return a[0] + b[0] - -def addn(a, args : List[Tensor]): - return a + torch.stack(args).sum() +""" + +script = r""" +def bar(tensors: List[Tensor], keys: List[str], args: List[str]): + a = tensors[0] 
+ b = tensors[1] + return a + b + +def bar_variadic(tensors: List[Tensor], keys: List[str], args: List[str]): + a = tensors[0] + l = tensors[1:] + return a + l[0] """ script_with_redis_commands = r""" def redis_string_int_to_tensor(redis_value: Any): return torch.tensor(int(str(redis_value))) -def int_set_get(key:str, value:int): +def int_set_get(tensors: List[Tensor], keys: List[str], args: List[str]): + key = args[0] + value = int(args[1]) redis.execute("SET", key, str(value)) res = redis.execute("GET", key) return redis_string_int_to_tensor(res) - -def func(a: Tensor, b: int, c: List[Tensor], d:str, e:List[float]): - redis.execute("SET", "key{1}", str(b)) - redis.execute("SET", d, str(torch.stack(c).sum().data[0])) - res = redis.execute("GET", d) - temp = redis_string_int_to_tensor(res) - redis.execute("DEL", d) - return torch.cat([a, temp, torch.tensor(e)], 0) + +def func(tensors: List[Tensor], keys: List[str], args: List[str]): + redis.execute("SET", keys[0], args[0]) + a = torch.stack(tensors).sum() + b = redis_string_int_to_tensor(redis.execute("GET", keys[0])) + redis.execute("DEL", keys[0]) + return b + a """ @@ -380,26 +387,34 @@ def test_deprecated_script_run(self): con = self.get_client() self.assertRaises(ResponseError, con.scriptset, "ket", "cpu", "return 1") - con.scriptset("ket", "cpu", script) + con.scriptset("ket", "cpu", script_old, "bar") con.tensorset("a", (2, 3), dtype="float") con.tensorset("b", (2, 3), dtype="float") + + # test bar(a, b) con.scriptrun("ket", "bar", inputs=["a", "b"], outputs=["c"]) tensor = con.tensorget("c", as_numpy=False) self.assertEqual([4, 6], tensor["values"]) + # test bar_variadic(a, args : List[Tensor]) + con.scriptrun("ket", "bar_variadic", inputs=["a", "$", "b", "b"], outputs=["c"]) + tensor = con.tensorget("c", as_numpy=False) + self.assertEqual([4, 6], tensor["values"]) + def test_scripts_execute(self): con = self.get_client() - self.assertRaises(ResponseError, con.scriptset, "ket", "cpu", "return 1") - con.scriptset("ket", "cpu", script) + self.assertRaises(ResponseError, con.scriptstore, "ket", "cpu", "return 1", "f") + con.scriptstore("ket", "cpu", script, "bar") con.tensorset("a", (2, 3), dtype="float") con.tensorset("b", (2, 3), dtype="float") # try with bad arguments: with self.assertRaises(ValueError) as e: - con.scriptexecute("ket", function=None, keys=None) + con.scriptexecute("ket", function=None, keys=None, inputs=None) self.assertEqual(str(e.exception), "Missing required arguments for script execute command") self.assertRaises(ResponseError, con.scriptexecute, "ket", "bar", keys=["a", "c"], inputs=["a"], outputs=["c"]) + # update new bar con.scriptexecute("ket", "bar", keys=["a", "b", "c"], inputs=["a", "b"], outputs=["c"]) tensor = con.tensorget("c", as_numpy=False) self.assertEqual([4, 6], tensor["values"]) @@ -409,72 +424,44 @@ def test_scripts_execute(self): script_det = con.scriptget("ket", meta_only=True) self.assertTrue(script_det["device"] == "cpu") self.assertNotIn("source", script_det) + # delete the script con.scriptdel("ket") self.assertRaises(ResponseError, con.scriptget, "ket") - con.scriptset("myscript{1}", "cpu", script, "version1") + # store new script + con.scriptstore("myscript{1}", "cpu", script, ["bar", "bar_variadic"], "version1") con.tensorset("a{1}", [2, 3, 2, 3], shape=(2, 2), dtype="float") con.tensorset("b{1}", [2, 3, 2, 3], shape=(2, 2), dtype="float") con.scriptexecute("myscript{1}", "bar", keys=["{1}"], inputs=["a{1}", "b{1}"], outputs=["c{1}"]) values = con.tensorget("c{1}", as_numpy=False) 
self.assertTrue(np.allclose(values["values"], [4.0, 6.0, 4.0, 6.0])) - def test_scripts_commands_support(self): - con = self.get_client() - con.scriptset("myscript{1}", "cpu", script_with_redis_commands) - con.scriptexecute("myscript{1}", "int_set_get", keys=["{1}"], inputs=["x{1}", 3], outputs=["y{1}"]) - values = con.tensorget("y{1}", as_numpy=False) - self.assertTrue(np.allclose(values["values"], [3])) - - def test_scripts_execute_list_input(self): - con = self.get_client() - con.scriptset("myscript{$}", "cpu", script, "version1") - con.tensorset("a{$}", [2, 3, 2, 3], shape=(2, 2), dtype="float") - con.tensorset("b1{$}", [2, 3, 2, 3], shape=(2, 2), dtype="float") - con.tensorset("b2{$}", [2, 3, 2, 3], shape=(2, 2), dtype="float") - - con.scriptexecute("myscript{$}", 'bar_variadic', - keys=["{$}"], - inputs=["a{$}", ["b1{$}", "b2{$}"]], - outputs=["c{$}"]) - - values = con.tensorget("c{$}", as_numpy=False)['values'] - self.assertEqual(values, [4.0, 6.0, 4.0, 6.0]) + con.tensorset("b1{1}", [2, 3, 2, 3], shape=(2, 2), dtype="float") + con.scriptexecute("myscript{1}", 'bar_variadic', + keys=["{1}"], + inputs=["a{1}", "b1{1}", "b{1}"], + outputs=["c{1}"]) - con.scriptexecute('myscript{$}', 'bar_two_lists', - keys=["{$}"], - inputs=[["a{$}"], ["b1{$}"]], - outputs=["c{$}"]) - - values = con.tensorget("c{$}", as_numpy=False)['values'] + values = con.tensorget("c{1}", as_numpy=False)['values'] self.assertEqual(values, [4.0, 6.0, 4.0, 6.0]) - con.tensorset("mytensor1{$}", [40], dtype="float") - con.tensorset("mytensor2{$}", [1], dtype="float") - con.tensorset("mytensor3{$}", [1], dtype="float") - con.scriptexecute("myscript{$}", "addn", - keys=["{$}"], - inputs=["mytensor1{$}", ["mytensor2{$}", "mytensor3{$}"]], - outputs=["result{$}"]) - - values = con.tensorget("result{$}", as_numpy=False) - self.assertTrue(np.allclose(values["values"], [42])) - - """ - def test_scripts_execute_multiple_list_input(self): + def test_scripts_redis_commands(self): con = self.get_client() - con.scriptset("myscript{1}", "cpu", script_with_redis_commands) + con.scriptstore("myscript{1}", "cpu", script_with_redis_commands, ["int_set_get", "func"]) + con.scriptexecute("myscript{1}", "int_set_get", keys=["{1}"], input_args=["x{1}", "3"], outputs=["y{1}"]) + values = con.tensorget("y{1}", as_numpy=False) + self.assertTrue(np.allclose(values["values"], [3])) + con.tensorset("mytensor1{1}", [40], dtype="float") con.tensorset("mytensor2{1}", [10], dtype="float") con.tensorset("mytensor3{1}", [1], dtype="float") con.scriptexecute("myscript{1}", "func", - keys=["{1}"], - inputs=["mytensor3{1}", 3, ["mytensor1{1}", "mytensor2{1}", "mytensor3{1}]"], "test", [1.25, 4.2]], + keys=["key{1}", "key2{1}"], + inputs=["mytensor1{1}", "mytensor2{1}", "mytensor3{1}"], + input_args=["3"], outputs=["my_output{1}"]) - - values = con.tensorget("result{$}", as_numpy=False) - self.assertTrue(np.allclose(values["values"], [1, 51, 1.25, 4.2])) - """ + values = con.tensorget("my_output{1}", as_numpy=False) + self.assertTrue(np.allclose(values["values"], [54])) def test_run_onnxml_model(self): mlmodel_path = os.path.join(MODEL_DIR, "boston.onnx") From 6ea2a9fb72044c6f2b5924d7a9378a039fd5337a Mon Sep 17 00:00:00 2001 From: AvitalFineRedis Date: Wed, 7 Jul 2021 10:32:28 +0300 Subject: [PATCH 26/34] fix examples --- redisai/client.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/redisai/client.py b/redisai/client.py index 27f39f9..e227873 100644 --- a/redisai/client.py +++ b/redisai/client.py @@ -567,6 +567,12 @@ def 
scriptstore( documentation. It's also important to note that that the script is executed on a high performance C++ runtime instead of the Python interpreter. And hence ``script`` should not have any import statements (A common mistake people make all the time) + + Example + ------- + >>> script = open(scriptpath).read() + >>> con.scriptset('ket', 'cpu', script, 'func') + 'OK' """ args = builder.scriptstore(key, device, script, entry_points, tag) res = self.execute_command(*args) @@ -752,7 +758,8 @@ def scriptexecute( 'OK' >>> con.scriptexecute('myscript{tag}', 'addn', >>> keys=['{tag}'], - >>> inputs=['mytensor1{tag}', ['mytensor2{tag}', 'mytensor3{tag}']], + >>> inputs=['mytensor1{tag}', 'mytensor2{tag}', 'mytensor3{tag}'], + >>> input_args=['5.0'], >>> outputs=['result{tag}']) 'OK' """ From 569604e06053e1ddc59aa8c32374513146646144 Mon Sep 17 00:00:00 2001 From: AvitalFineRedis Date: Wed, 7 Jul 2021 10:58:47 +0300 Subject: [PATCH 27/34] typo --- redisai/client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/redisai/client.py b/redisai/client.py index e227873..ac12ee3 100644 --- a/redisai/client.py +++ b/redisai/client.py @@ -571,7 +571,7 @@ def scriptstore( Example ------- >>> script = open(scriptpath).read() - >>> con.scriptset('ket', 'cpu', script, 'func') + >>> con.scriptsore('ket', 'cpu', script, 'func') 'OK' """ args = builder.scriptstore(key, device, script, entry_points, tag) From 7866203c858300ac48d6ec12bafb03ccf4761658 Mon Sep 17 00:00:00 2001 From: AvitalFineRedis Date: Wed, 7 Jul 2021 11:20:56 +0300 Subject: [PATCH 28/34] flake8 warnings --- redisai/client.py | 2 +- redisai/command_builder.py | 13 +++++++++++-- test/test.py | 15 +++++++-------- 3 files changed, 19 insertions(+), 11 deletions(-) diff --git a/redisai/client.py b/redisai/client.py index ac12ee3..b56d12f 100644 --- a/redisai/client.py +++ b/redisai/client.py @@ -529,7 +529,7 @@ def tensorget( ) def scriptstore( - self, key: AnyStr, device: str, script: str, entry_points: Union[str, Sequence[str]] , tag: AnyStr = None + self, key: AnyStr, device: str, script: str, entry_points: Union[str, Sequence[str]], tag: AnyStr = None ) -> str: """ Set the script to RedisAI. Action similar to scriptset. The difference is that in diff --git a/redisai/command_builder.py b/redisai/command_builder.py index cdf9ea8..fbc2048 100644 --- a/redisai/command_builder.py +++ b/redisai/command_builder.py @@ -203,8 +203,14 @@ def tensorget(key: AnyStr, as_numpy: bool = True, meta_only: bool = False) -> Se args.append("VALUES") return args -def scriptstore(name: AnyStr, device: str, script: str, entry_points: Union[str, Sequence[str]], tag: AnyStr = None)\ - -> Sequence: + +def scriptstore( + name: AnyStr, + device: str, + script: str, + entry_points: Union[str, Sequence[str]], + tag: AnyStr = None +) -> Sequence: if device.upper() not in utils.allowed_devices: raise ValueError(f"Device not allowed. Use any from {utils.allowed_devices}") args = ["AI.SCRIPTSTORE", name, device] @@ -215,6 +221,7 @@ def scriptstore(name: AnyStr, device: str, script: str, entry_points: Union[str, args.append(script) return args + def scriptset(name: AnyStr, device: str, script: str, tag: AnyStr = None) -> Sequence: if device.upper() not in utils.allowed_devices: raise ValueError(f"Device not allowed. 
Use any from {utils.allowed_devices}") @@ -256,6 +263,7 @@ def scriptrun( ) return args + def scriptexecute( name: AnyStr, function: str, @@ -287,6 +295,7 @@ def scriptexecute( return args + def scriptscan() -> Sequence: return ("AI._SCRIPTSCAN",) diff --git a/test/test.py b/test/test.py index 2c138c3..193b01b 100644 --- a/test/test.py +++ b/test/test.py @@ -31,7 +31,7 @@ def __exit__(self, *args): script_old = r""" def bar(a, b): return a + b - + def bar_variadic(a, args : List[Tensor]): return args[0] + args[1] """ @@ -58,16 +58,15 @@ def int_set_get(tensors: List[Tensor], keys: List[str], args: List[str]): redis.execute("SET", key, str(value)) res = redis.execute("GET", key) return redis_string_int_to_tensor(res) - + def func(tensors: List[Tensor], keys: List[str], args: List[str]): redis.execute("SET", keys[0], args[0]) a = torch.stack(tensors).sum() b = redis_string_int_to_tensor(redis.execute("GET", keys[0])) redis.execute("DEL", keys[0]) - return b + a + return b + a """ - class RedisAITestBase(TestCase): def setUp(self): super().setUp() @@ -457,10 +456,10 @@ def test_scripts_redis_commands(self): con.tensorset("mytensor2{1}", [10], dtype="float") con.tensorset("mytensor3{1}", [1], dtype="float") con.scriptexecute("myscript{1}", "func", - keys=["key{1}", "key2{1}"], - inputs=["mytensor1{1}", "mytensor2{1}", "mytensor3{1}"], - input_args=["3"], - outputs=["my_output{1}"]) + keys=["key{1}", "key2{1}"], + inputs=["mytensor1{1}", "mytensor2{1}", "mytensor3{1}"], + input_args=["3"], + outputs=["my_output{1}"]) values = con.tensorget("my_output{1}", as_numpy=False) self.assertTrue(np.allclose(values["values"], [54])) From 300db9de68ef2db28285dc9268f8e3267c3ad232 Mon Sep 17 00:00:00 2001 From: AvitalFineRedis Date: Wed, 7 Jul 2021 11:20:56 +0300 Subject: [PATCH 29/34] flake8 warnings --- redisai/client.py | 2 +- redisai/command_builder.py | 13 +++++++++++-- test/test.py | 14 +++++++------- 3 files changed, 19 insertions(+), 10 deletions(-) diff --git a/redisai/client.py b/redisai/client.py index ac12ee3..b56d12f 100644 --- a/redisai/client.py +++ b/redisai/client.py @@ -529,7 +529,7 @@ def tensorget( ) def scriptstore( - self, key: AnyStr, device: str, script: str, entry_points: Union[str, Sequence[str]] , tag: AnyStr = None + self, key: AnyStr, device: str, script: str, entry_points: Union[str, Sequence[str]], tag: AnyStr = None ) -> str: """ Set the script to RedisAI. Action similar to scriptset. The difference is that in diff --git a/redisai/command_builder.py b/redisai/command_builder.py index cdf9ea8..fbc2048 100644 --- a/redisai/command_builder.py +++ b/redisai/command_builder.py @@ -203,8 +203,14 @@ def tensorget(key: AnyStr, as_numpy: bool = True, meta_only: bool = False) -> Se args.append("VALUES") return args -def scriptstore(name: AnyStr, device: str, script: str, entry_points: Union[str, Sequence[str]], tag: AnyStr = None)\ - -> Sequence: + +def scriptstore( + name: AnyStr, + device: str, + script: str, + entry_points: Union[str, Sequence[str]], + tag: AnyStr = None +) -> Sequence: if device.upper() not in utils.allowed_devices: raise ValueError(f"Device not allowed. Use any from {utils.allowed_devices}") args = ["AI.SCRIPTSTORE", name, device] @@ -215,6 +221,7 @@ def scriptstore(name: AnyStr, device: str, script: str, entry_points: Union[str, args.append(script) return args + def scriptset(name: AnyStr, device: str, script: str, tag: AnyStr = None) -> Sequence: if device.upper() not in utils.allowed_devices: raise ValueError(f"Device not allowed. 
Use any from {utils.allowed_devices}") @@ -256,6 +263,7 @@ def scriptrun( ) return args + def scriptexecute( name: AnyStr, function: str, @@ -287,6 +295,7 @@ def scriptexecute( return args + def scriptscan() -> Sequence: return ("AI._SCRIPTSCAN",) diff --git a/test/test.py b/test/test.py index 2c138c3..dac121d 100644 --- a/test/test.py +++ b/test/test.py @@ -31,7 +31,7 @@ def __exit__(self, *args): script_old = r""" def bar(a, b): return a + b - + def bar_variadic(a, args : List[Tensor]): return args[0] + args[1] """ @@ -58,13 +58,13 @@ def int_set_get(tensors: List[Tensor], keys: List[str], args: List[str]): redis.execute("SET", key, str(value)) res = redis.execute("GET", key) return redis_string_int_to_tensor(res) - + def func(tensors: List[Tensor], keys: List[str], args: List[str]): redis.execute("SET", keys[0], args[0]) a = torch.stack(tensors).sum() b = redis_string_int_to_tensor(redis.execute("GET", keys[0])) redis.execute("DEL", keys[0]) - return b + a + return b + a """ @@ -457,10 +457,10 @@ def test_scripts_redis_commands(self): con.tensorset("mytensor2{1}", [10], dtype="float") con.tensorset("mytensor3{1}", [1], dtype="float") con.scriptexecute("myscript{1}", "func", - keys=["key{1}", "key2{1}"], - inputs=["mytensor1{1}", "mytensor2{1}", "mytensor3{1}"], - input_args=["3"], - outputs=["my_output{1}"]) + keys=["key{1}", "key2{1}"], + inputs=["mytensor1{1}", "mytensor2{1}", "mytensor3{1}"], + input_args=["3"], + outputs=["my_output{1}"]) values = con.tensorget("my_output{1}", as_numpy=False) self.assertTrue(np.allclose(values["values"], [54])) From 12ad6bd7adafdfe3f56e511482f0ec7e1a726c73 Mon Sep 17 00:00:00 2001 From: AvitalFineRedis Date: Wed, 7 Jul 2021 11:43:42 +0300 Subject: [PATCH 30/34] typo --- test/test.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/test/test.py b/test/test.py index dac121d..6afe0e1 100644 --- a/test/test.py +++ b/test/test.py @@ -388,7 +388,7 @@ def test_deprecated_script_run(self): con = self.get_client() self.assertRaises(ResponseError, con.scriptset, "ket", "cpu", "return 1") - con.scriptset("ket", "cpu", script_old, "bar") + con.scriptset("ket", "cpu", script_old) con.tensorset("a", (2, 3), dtype="float") con.tensorset("b", (2, 3), dtype="float") @@ -404,18 +404,16 @@ def test_deprecated_script_run(self): def test_scripts_execute(self): con = self.get_client() - self.assertRaises(ResponseError, con.scriptstore, "ket", "cpu", "return 1", "f") - con.scriptstore("ket", "cpu", script, "bar") - con.tensorset("a", (2, 3), dtype="float") - con.tensorset("b", (2, 3), dtype="float") - # try with bad arguments: with self.assertRaises(ValueError) as e: con.scriptexecute("ket", function=None, keys=None, inputs=None) self.assertEqual(str(e.exception), "Missing required arguments for script execute command") self.assertRaises(ResponseError, con.scriptexecute, "ket", "bar", keys=["a", "c"], inputs=["a"], outputs=["c"]) + self.assertRaises(ResponseError, con.scriptstore, "ket", "cpu", "return 1", "f") - # update new bar + con.scriptstore("ket", "cpu", script, "bar") + con.tensorset("a", (2, 3), dtype="float") + con.tensorset("b", (2, 3), dtype="float") con.scriptexecute("ket", "bar", keys=["a", "b", "c"], inputs=["a", "b"], outputs=["c"]) tensor = con.tensorget("c", as_numpy=False) self.assertEqual([4, 6], tensor["values"]) From 9a6344a4af4f76f89888bd2fe9c5574facaa8e3e Mon Sep 17 00:00:00 2001 From: AvitalFineRedis Date: Sun, 18 Jul 2021 10:31:58 +0300 Subject: [PATCH 31/34] fix PR comments --- redisai/client.py | 64 
++++++++++++++++++++++---------------- redisai/command_builder.py | 13 +++----- redisai/pipeline.py | 2 +- test/test.py | 49 +++++++++++++++++------------ 4 files changed, 72 insertions(+), 56 deletions(-) diff --git a/redisai/client.py b/redisai/client.py index b56d12f..3bfa912 100644 --- a/redisai/client.py +++ b/redisai/client.py @@ -532,13 +532,14 @@ def scriptstore( self, key: AnyStr, device: str, script: str, entry_points: Union[str, Sequence[str]], tag: AnyStr = None ) -> str: """ - Set the script to RedisAI. Action similar to scriptset. The difference is that in - scriptstore you must specify entry points for your script. RedisAI uses the TorchScript - engine to execute the script. So the script should have only TorchScript supported - constructs. That being said, it's important to mention that using redisai script - to do post processing or pre processing for a Tensorflow (or any other backend) - is completely valid. For more details about TorchScript and supported ops, - checkout TorchScript documentation. + Set the script to RedisAI. The difference from scriptset is that in scriptstore + you must specify entry points within your script. These functions must have specific + signature: 'def entry_point(tensors: List[Tensor], keys: List[str], args: List[str])'. + RedisAI uses the TorchScript engine to execute the script. So the script should + have only TorchScript supported constructs. That being said, it's important to + mention that using redisai script to do post processing or pre processing for a + Tensorflow (or any other backend) is completely valid. For more details about + TorchScript and supported ops, checkout TorchScript documentation. Parameters ---------- @@ -550,10 +551,13 @@ def scriptstore( script : str Script itself, as a Python string entry_points : Union[str, Sequence[str]] - A list of entry points to be used in the script. Each entry point should have - the signature of 'def entry_point(tensors: List[Tensor], keys: List[str], args: List[str])' + A list of functions in the script that may serve as entry point for the + execution. Each entry point must have the specify signature: + def entry_point(tensors: List[Tensor], keys: List[str], args: List[str])) + Note that the script may contain additional helper functions that doesn't + have to follow this signature. tag : AnyStr - Any string that will be saved in RedisAI as tag for the model + Any string that will be saved in RedisAI as tag for the script Returns ------- @@ -570,14 +574,20 @@ def scriptstore( Example ------- - >>> script = open(scriptpath).read() - >>> con.scriptsore('ket', 'cpu', script, 'func') + >>> script = r''' + >>> def bar(tensors: List[Tensor], keys: List[str], args: List[str]): + >>> a = tensors[0] + >>> b = tensors[1] + >>> return a + b + >>>''' + >>> con.scriptstore('ket', 'cpu', script, 'bar') 'OK' """ args = builder.scriptstore(key, device, script, entry_points, tag) res = self.execute_command(*args) return res if not self.enable_postprocess else processor.scriptstore(res) + @deprecated(version="1.2.0", reason="Use scriptstore instead") def scriptset( self, key: AnyStr, device: str, script: str, tag: AnyStr = None ) -> str: @@ -716,12 +726,13 @@ def scriptexecute( function: str, keys: Union[AnyStr, Sequence[AnyStr]] = None, inputs: Union[AnyStr, Sequence[AnyStr]] = None, - input_args: Union[AnyStr, Sequence[AnyStr]] = None, + args: Union[AnyStr, Sequence[AnyStr]] = None, outputs: Union[AnyStr, Sequence[AnyStr]] = None, timeout: int = None, ) -> str: """ - Run an already set script. 
Similar to modelexecute + Run an already set script. Similar to modelexecute. + Must specify keys or inputs. Parameters ---------- @@ -730,18 +741,17 @@ def scriptexecute( function : str Name of the function in the ``script`` keys : Union[AnyStr, Sequence[AnyStr]] - Either a squence of key names that the script will access before, during and - after its execution, or a tag which all those keys share. - These keys will be used as the 'keys' for the scriptexecute function. + Denotes the list of Redis key names that the script will access to + during its execution, for both read and/or write operations. inputs : Union[AnyStr, Sequence[AnyStr]] - keys or inputs must be provided (or both). - These inputs will be used as the 'tensors' for the scriptexecute function. - input_args : Union[AnyStr, Sequence[AnyStr]] - These inputs will be used as the 'args' for the scriptexecute function. - They can be integers, floats or strings. + Denotes the input tensors list. + args : Union[AnyStr, Sequence[AnyStr]] + Denotes the list of additional arguments that a user can send to the + script. All args are sent as strings, but can be casted to other types + supported by torch script, such as int, or float. outputs : Union[AnyStr, List[AnyStr]] - keys on which the outputs to be saved. If those keys exist already, scriptexecute - will overwrite them with new values. + Denotes the output tensors keys' list. If those keys exist already, + scriptexecute will overwrite them with new values. timeout : int The max number on milisecinds that may pass before the request is prossced (meaning that the result will not be computed after that time and TIMEDOUT @@ -754,16 +764,16 @@ def scriptexecute( Example ------- - >>> con.scriptexecute('ket', 'bar', keys=['a', 'b', 'c'], inputs=['a', 'b'], outputs=['c']) + >>> con.scriptexecute('myscript', 'bar', inputs=['a', 'b'], outputs=['c']) 'OK' >>> con.scriptexecute('myscript{tag}', 'addn', >>> keys=['{tag}'], >>> inputs=['mytensor1{tag}', 'mytensor2{tag}', 'mytensor3{tag}'], - >>> input_args=['5.0'], + >>> args=['5.0'], >>> outputs=['result{tag}']) 'OK' """ - args = builder.scriptexecute(key, function, keys, inputs, input_args, outputs, timeout) + args = builder.scriptexecute(key, function, keys, inputs, args, outputs, timeout) res = self.execute_command(*args) return res if not self.enable_postprocess else processor.scriptexecute(res) diff --git a/redisai/command_builder.py b/redisai/command_builder.py index fbc2048..200a4d3 100644 --- a/redisai/command_builder.py +++ b/redisai/command_builder.py @@ -213,6 +213,8 @@ def scriptstore( ) -> Sequence: if device.upper() not in utils.allowed_devices: raise ValueError(f"Device not allowed. 
Use any from {utils.allowed_devices}") + if name is None or script is None or entry_points is None: + raise ValueError("Missing required arguments for script store command") args = ["AI.SCRIPTSTORE", name, device] if tag: args += ["TAG", tag] @@ -275,15 +277,10 @@ def scriptexecute( ) -> Sequence: if name is None or function is None or (keys is None and inputs is None): raise ValueError("Missing required arguments for script execute command") - args = [ - "AI.SCRIPTEXECUTE", - name, - function, - "KEYS", - len(utils.listify(keys)), - *utils.listify(keys), - ] + args = ["AI.SCRIPTEXECUTE", name, function] + if keys is not None: + args += ["KEYS", len(utils.listify(keys)), *utils.listify(keys)] if inputs is not None: args += ["INPUTS", len(utils.listify(inputs)), *utils.listify(inputs)] if input_args is not None: diff --git a/redisai/pipeline.py b/redisai/pipeline.py index 1415b25..2f6efd6 100644 --- a/redisai/pipeline.py +++ b/redisai/pipeline.py @@ -1,5 +1,5 @@ from functools import partial -from typing import AnyStr, Union, Sequence +from typing import AnyStr, Sequence, Union import numpy as np diff --git a/test/test.py b/test/test.py index 6afe0e1..9869eb8 100644 --- a/test/test.py +++ b/test/test.py @@ -53,8 +53,8 @@ def redis_string_int_to_tensor(redis_value: Any): return torch.tensor(int(str(redis_value))) def int_set_get(tensors: List[Tensor], keys: List[str], args: List[str]): - key = args[0] - value = int(args[1]) + key = keys[0] + value = int(args[0]) redis.execute("SET", key, str(value)) res = redis.execute("GET", key) return redis_string_int_to_tensor(res) @@ -384,48 +384,56 @@ def test_run_tf_model(self): self.assertRaises(ResponseError, con.modelget, "m") # AI.SCRIPTRUN is deprecated by AI.SCRIPTEXECUTE - def test_deprecated_script_run(self): + # and AI.SCRIPTSET is deprecated by AI.SCRIPTSTORE + def test_deprecated_scriptset_and_scriptrun(self): con = self.get_client() - self.assertRaises(ResponseError, con.scriptset, - "ket", "cpu", "return 1") - con.scriptset("ket", "cpu", script_old) + self.assertRaises(ResponseError, con.scriptset, "scr", "cpu", "return 1") + con.scriptset("scr", "cpu", script_old) con.tensorset("a", (2, 3), dtype="float") con.tensorset("b", (2, 3), dtype="float") # test bar(a, b) - con.scriptrun("ket", "bar", inputs=["a", "b"], outputs=["c"]) + con.scriptrun("scr", "bar", inputs=["a", "b"], outputs=["c"]) tensor = con.tensorget("c", as_numpy=False) self.assertEqual([4, 6], tensor["values"]) # test bar_variadic(a, args : List[Tensor]) - con.scriptrun("ket", "bar_variadic", inputs=["a", "$", "b", "b"], outputs=["c"]) + con.scriptrun("scr", "bar_variadic", inputs=["a", "$", "b", "b"], outputs=["c"]) tensor = con.tensorget("c", as_numpy=False) self.assertEqual([4, 6], tensor["values"]) + def test_scriptstore(self): + con = self.get_client() + # try with bad arguments: + with self.assertRaises(ValueError) as e: + con.scriptstore("test", "cpu", script, entry_points=None) + self.assertEqual(str(e.exception), "Missing required arguments for script store command") + self.assertRaises(ValueError, con.scriptstore, "test", "cpu", script=None, entry_points="bar") + self.assertRaises(ResponseError, con.scriptstore, "ket", "cpu", "return 1", "f") + def test_scripts_execute(self): con = self.get_client() # try with bad arguments: with self.assertRaises(ValueError) as e: - con.scriptexecute("ket", function=None, keys=None, inputs=None) + con.scriptexecute("test", function=None, keys=None, inputs=None) self.assertEqual(str(e.exception), "Missing required arguments for 
script execute command") - self.assertRaises(ResponseError, con.scriptexecute, "ket", "bar", keys=["a", "c"], inputs=["a"], outputs=["c"]) - self.assertRaises(ResponseError, con.scriptstore, "ket", "cpu", "return 1", "f") + self.assertRaises(ResponseError, con.scriptexecute, "test", "bar", keys=["a", "c"], inputs=["a"], outputs=["c"]) - con.scriptstore("ket", "cpu", script, "bar") + con.scriptstore("test", "cpu", script, "bar") con.tensorset("a", (2, 3), dtype="float") con.tensorset("b", (2, 3), dtype="float") - con.scriptexecute("ket", "bar", keys=["a", "b", "c"], inputs=["a", "b"], outputs=["c"]) + con.scriptexecute("test", "bar", inputs=["a", "b"], outputs=["c"]) tensor = con.tensorget("c", as_numpy=False) self.assertEqual([4, 6], tensor["values"]) - script_det = con.scriptget("ket") + script_det = con.scriptget("test") self.assertTrue(script_det["device"] == "cpu") self.assertTrue(script_det["source"] == script) - script_det = con.scriptget("ket", meta_only=True) + script_det = con.scriptget("test", meta_only=True) self.assertTrue(script_det["device"] == "cpu") self.assertNotIn("source", script_det) # delete the script - con.scriptdel("ket") - self.assertRaises(ResponseError, con.scriptget, "ket") + con.scriptdel("test") + self.assertRaises(ResponseError, con.scriptget, "test") # store new script con.scriptstore("myscript{1}", "cpu", script, ["bar", "bar_variadic"], "version1") @@ -447,7 +455,7 @@ def test_scripts_execute(self): def test_scripts_redis_commands(self): con = self.get_client() con.scriptstore("myscript{1}", "cpu", script_with_redis_commands, ["int_set_get", "func"]) - con.scriptexecute("myscript{1}", "int_set_get", keys=["{1}"], input_args=["x{1}", "3"], outputs=["y{1}"]) + con.scriptexecute("myscript{1}", "int_set_get", keys=["x{1}", "{1}"], args=["3"], outputs=["y{1}"]) values = con.tensorget("y{1}", as_numpy=False) self.assertTrue(np.allclose(values["values"], [3])) @@ -455,12 +463,13 @@ def test_scripts_redis_commands(self): con.tensorset("mytensor2{1}", [10], dtype="float") con.tensorset("mytensor3{1}", [1], dtype="float") con.scriptexecute("myscript{1}", "func", - keys=["key{1}", "key2{1}"], + keys=["key{1}"], inputs=["mytensor1{1}", "mytensor2{1}", "mytensor3{1}"], - input_args=["3"], + args=["3"], outputs=["my_output{1}"]) values = con.tensorget("my_output{1}", as_numpy=False) self.assertTrue(np.allclose(values["values"], [54])) + self.assertIsNone(con.get("key{1}")) def test_run_onnxml_model(self): mlmodel_path = os.path.join(MODEL_DIR, "boston.onnx") From b8e77b90da7541196839a755423297419a931964 Mon Sep 17 00:00:00 2001 From: AvitalFineRedis Date: Sun, 18 Jul 2021 13:48:03 +0300 Subject: [PATCH 32/34] fix PR comments --- redisai/client.py | 1 - test/test.py | 12 ++++++++---- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/redisai/client.py b/redisai/client.py index 3bfa912..1411ad8 100644 --- a/redisai/client.py +++ b/redisai/client.py @@ -767,7 +767,6 @@ def scriptexecute( >>> con.scriptexecute('myscript', 'bar', inputs=['a', 'b'], outputs=['c']) 'OK' >>> con.scriptexecute('myscript{tag}', 'addn', - >>> keys=['{tag}'], >>> inputs=['mytensor1{tag}', 'mytensor2{tag}', 'mytensor3{tag}'], >>> args=['5.0'], >>> outputs=['result{tag}']) diff --git a/test/test.py b/test/test.py index 9869eb8..4690117 100644 --- a/test/test.py +++ b/test/test.py @@ -409,7 +409,10 @@ def test_scriptstore(self): con.scriptstore("test", "cpu", script, entry_points=None) self.assertEqual(str(e.exception), "Missing required arguments for script store command") 
self.assertRaises(ValueError, con.scriptstore, "test", "cpu", script=None, entry_points="bar") - self.assertRaises(ResponseError, con.scriptstore, "ket", "cpu", "return 1", "f") + with self.assertRaises(ResponseError) as e: + con.scriptstore("test", "cpu", "return 1", "f") + self.assertEqual(str(e.exception), + "expected def but found 'return' here: File \"\", line 1 return 1 ~~~~~~ <--- HERE ") def test_scripts_execute(self): con = self.get_client() @@ -417,7 +420,9 @@ def test_scripts_execute(self): with self.assertRaises(ValueError) as e: con.scriptexecute("test", function=None, keys=None, inputs=None) self.assertEqual(str(e.exception), "Missing required arguments for script execute command") - self.assertRaises(ResponseError, con.scriptexecute, "test", "bar", keys=["a", "c"], inputs=["a"], outputs=["c"]) + with self.assertRaises(ResponseError) as e: + con.scriptexecute("test", "bar", inputs=["a"], outputs=["c"]) + self.assertEqual(str(e.exception), "script key is empty") con.scriptstore("test", "cpu", script, "bar") con.tensorset("a", (2, 3), dtype="float") @@ -439,13 +444,12 @@ def test_scripts_execute(self): con.scriptstore("myscript{1}", "cpu", script, ["bar", "bar_variadic"], "version1") con.tensorset("a{1}", [2, 3, 2, 3], shape=(2, 2), dtype="float") con.tensorset("b{1}", [2, 3, 2, 3], shape=(2, 2), dtype="float") - con.scriptexecute("myscript{1}", "bar", keys=["{1}"], inputs=["a{1}", "b{1}"], outputs=["c{1}"]) + con.scriptexecute("myscript{1}", "bar", inputs=["a{1}", "b{1}"], outputs=["c{1}"]) values = con.tensorget("c{1}", as_numpy=False) self.assertTrue(np.allclose(values["values"], [4.0, 6.0, 4.0, 6.0])) con.tensorset("b1{1}", [2, 3, 2, 3], shape=(2, 2), dtype="float") con.scriptexecute("myscript{1}", 'bar_variadic', - keys=["{1}"], inputs=["a{1}", "b1{1}", "b{1}"], outputs=["c{1}"]) From 964d8baa35deeddbdb8689c6aa60bd718853227b Mon Sep 17 00:00:00 2001 From: AvitalFineRedis Date: Sun, 18 Jul 2021 13:57:38 +0300 Subject: [PATCH 33/34] fix imports --- redisai/dag.py | 5 ++--- redisai/pipeline.py | 2 +- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/redisai/dag.py b/redisai/dag.py index 3ef354a..1746010 100644 --- a/redisai/dag.py +++ b/redisai/dag.py @@ -1,11 +1,10 @@ from functools import partial -from typing import AnyStr, Union, Sequence, Any, List +from typing import Any, AnyStr, List, Sequence, Union import numpy as np -from redisai.postprocessor import Processor from redisai import command_builder as builder - +from redisai.postprocessor import Processor processor = Processor() diff --git a/redisai/pipeline.py b/redisai/pipeline.py index 2f6efd6..7a66370 100644 --- a/redisai/pipeline.py +++ b/redisai/pipeline.py @@ -2,9 +2,9 @@ from typing import AnyStr, Sequence, Union import numpy as np +import redis from redisai import command_builder as builder -import redis from redisai.postprocessor import Processor From f17dab01a4de67c25e15ffb5929c96741d396d23 Mon Sep 17 00:00:00 2001 From: AvitalFineRedis Date: Sun, 18 Jul 2021 13:57:38 +0300 Subject: [PATCH 34/34] fix imports --- redisai/dag.py | 5 ++--- redisai/pipeline.py | 3 +-- redisai/utils.py | 3 +-- 3 files changed, 4 insertions(+), 7 deletions(-) diff --git a/redisai/dag.py b/redisai/dag.py index 3ef354a..1746010 100644 --- a/redisai/dag.py +++ b/redisai/dag.py @@ -1,11 +1,10 @@ from functools import partial -from typing import AnyStr, Union, Sequence, Any, List +from typing import Any, AnyStr, List, Sequence, Union import numpy as np -from redisai.postprocessor import Processor from redisai import 
command_builder as builder - +from redisai.postprocessor import Processor processor = Processor() diff --git a/redisai/pipeline.py b/redisai/pipeline.py index 2f6efd6..53d28b2 100644 --- a/redisai/pipeline.py +++ b/redisai/pipeline.py @@ -2,12 +2,11 @@ from typing import AnyStr, Sequence, Union import numpy as np +import redis from redisai import command_builder as builder -import redis from redisai.postprocessor import Processor - processor = Processor() diff --git a/redisai/utils.py b/redisai/utils.py index 3723bc5..ba41809 100644 --- a/redisai/utils.py +++ b/redisai/utils.py @@ -1,7 +1,6 @@ -from typing import Union, ByteString, Sequence, List, AnyStr, Callable +from typing import AnyStr, ByteString, Callable, List, Sequence, Union import numpy as np - dtype_dict = { "float": "FLOAT", "double": "DOUBLE",