diff --git a/Dockerfile.dev b/.devcontainer/Dockerfile.dev similarity index 66% rename from Dockerfile.dev rename to .devcontainer/Dockerfile.dev index 4d7143a11..80a45f6f7 100644 --- a/Dockerfile.dev +++ b/.devcontainer/Dockerfile.dev @@ -7,14 +7,21 @@ RUN apt-get update && apt-get install -y sudo git locales RUN echo "en_US.UTF-8 UTF-8" | tee -a /etc/locale.gen && locale-gen ######################################################################################################################## -# Create User # +# Create Users # ######################################################################################################################## -# Change root Password to 1234 -RUN echo 'root:1234' | chpasswd -# Create new user: "dev" also with password 1234 -RUN useradd -ms /bin/bash dev && \ - echo 'dev:1234' | chpasswd && \ +RUN < "/etc/discourse/client-api-key" -# Setup the application -cd /tmp/application -pip3 install -r requirements.txt +COPY < /dev/null + apt-get update -y + apt-get install -y docker-ce-cli + touch /var/run/docker.sock + chown root:docker /var/run/docker.sock +EOF diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 1ec1e9853..6436ae7a8 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -6,7 +6,7 @@ // Update the 'dockerComposeFile' list if you have more compose files or use different names. // The .devcontainer/docker-compose.yml file contains any overrides you need/want to make. "dockerComposeFile": [ - "../docker-compose.dev.yml", + "./docker-compose.dev.yml", "docker-compose.yml" ], @@ -17,7 +17,7 @@ // The optional 'workspaceFolder' property is the path VS Code should open by default when // connected. This is typically a file mount in .devcontainer/docker-compose.yml "workspaceFolder": "/workspaces/${localWorkspaceFolderBasename}", - + // Features to add to the dev container. More info: https://containers.dev/features. // "features": {}, @@ -50,7 +50,7 @@ // "shutdownAction": "none", // Uncomment the next line to run commands after the container is created. - "postCreateCommand": "cd frontend; yarn; cd ../application; make setup; make import-mock-data", + "postCreateCommand": "cd frontend; yarn; cd ../python-client; pip install -e .[test,dev]; cd ../application/; pip install -e .[test,dev]; make setup; make import-mock-data", // Configure tool-specific properties. 
"customizations": { @@ -60,6 +60,14 @@ "json.format.keepLines": true, "livePreview.portNumber": 3080, "remote.autoForwardPorts": false, + "files.exclude": { + "**/__pycache__": true, + "**/.mypy_cache": true, + "**/.pytest_cache": true, + "**/*.egg-info": true, + "**/node_modules": true, + "application/.data-dumps": true + }, "launch": { "version": "0.2.0", "configurations": [ @@ -68,7 +76,7 @@ "type": "node", "request": "launch", "runtimeExecutable": "yarn", - "cwd": "${workspaceFolder}/frontend", + "cwd": "${workspaceFolder:Frontend}/", "runtimeArgs": [ "dev" ] @@ -82,7 +90,26 @@ ], "django": true, "autoStartBrowser": false, - "program": "${workspaceFolder}/application/src/manage.py" + "program": "${workspaceFolder:Backend}/src/manage.py" + }, + { + "name": "TIRA Backend Tests", + "type": "debugpy", + "request": "launch", + "program": "${workspaceFolder:Backend}/src/manage.py", + "cwd": "${workspaceFolder:Backend}/test", + "args": [ "test", "--failfast", "--settings=settings_test" ], + "django": true, + "env": { "PYTHONPATH": ":../src:.", "DJANGO_SETTINGS_MODULE": "settings_test" }, + "justMyCode": false + }, + { + "name": "Frontend Tests", + "type": "node", + "request": "launch", + "runtimeExecutable": "yarn", + "args": [ "test" ], + "cwd": "${workspaceFolder:Frontend}" } ], "compounds": [ @@ -109,7 +136,9 @@ "ms-python.isort", "ms-python.black-formatter", "ms-python.flake8", - "ms-python.mypy-type-checker" + "ms-python.mypy-type-checker", + "42Crunch.vscode-openapi", + "vuetifyjs.vuetify-vscode" ] } }, diff --git a/.devfiles/README.md b/.devcontainer/devfiles/README.md similarity index 100% rename from .devfiles/README.md rename to .devcontainer/devfiles/README.md diff --git a/.devfiles/authelia/configuration.dev.yml b/.devcontainer/devfiles/authelia/configuration.dev.yml similarity index 100% rename from .devfiles/authelia/configuration.dev.yml rename to .devcontainer/devfiles/authelia/configuration.dev.yml diff --git a/.devfiles/authelia/users-database.yml b/.devcontainer/devfiles/authelia/users-database.yml similarity index 100% rename from .devfiles/authelia/users-database.yml rename to .devcontainer/devfiles/authelia/users-database.yml diff --git a/.devfiles/nginx/auth.conf b/.devcontainer/devfiles/nginx/auth.conf similarity index 100% rename from .devfiles/nginx/auth.conf rename to .devcontainer/devfiles/nginx/auth.conf diff --git a/.devfiles/nginx/certs/tira-dev-selfsigned.crt b/.devcontainer/devfiles/nginx/certs/tira-dev-selfsigned.crt similarity index 100% rename from .devfiles/nginx/certs/tira-dev-selfsigned.crt rename to .devcontainer/devfiles/nginx/certs/tira-dev-selfsigned.crt diff --git a/.devfiles/nginx/certs/tira-dev-selfsigned.key b/.devcontainer/devfiles/nginx/certs/tira-dev-selfsigned.key similarity index 100% rename from .devfiles/nginx/certs/tira-dev-selfsigned.key rename to .devcontainer/devfiles/nginx/certs/tira-dev-selfsigned.key diff --git a/.devfiles/nginx/snippets/authelia-authrequest.conf b/.devcontainer/devfiles/nginx/snippets/authelia-authrequest.conf similarity index 100% rename from .devfiles/nginx/snippets/authelia-authrequest.conf rename to .devcontainer/devfiles/nginx/snippets/authelia-authrequest.conf diff --git a/.devfiles/nginx/snippets/authelia-location.conf b/.devcontainer/devfiles/nginx/snippets/authelia-location.conf similarity index 100% rename from .devfiles/nginx/snippets/authelia-location.conf rename to .devcontainer/devfiles/nginx/snippets/authelia-location.conf diff --git a/.devfiles/nginx/snippets/proxy.conf 
b/.devcontainer/devfiles/nginx/snippets/proxy.conf similarity index 100% rename from .devfiles/nginx/snippets/proxy.conf rename to .devcontainer/devfiles/nginx/snippets/proxy.conf diff --git a/.devfiles/nginx/snippets/ssl.conf b/.devcontainer/devfiles/nginx/snippets/ssl.conf similarity index 100% rename from .devfiles/nginx/snippets/ssl.conf rename to .devcontainer/devfiles/nginx/snippets/ssl.conf diff --git a/.devfiles/nginx/tira-backend.conf b/.devcontainer/devfiles/nginx/tira-backend.conf similarity index 100% rename from .devfiles/nginx/tira-backend.conf rename to .devcontainer/devfiles/nginx/tira-backend.conf diff --git a/.devfiles/nginx/tira.conf b/.devcontainer/devfiles/nginx/tira.conf similarity index 100% rename from .devfiles/nginx/tira.conf rename to .devcontainer/devfiles/nginx/tira.conf diff --git a/docker-compose.dev.yml b/.devcontainer/docker-compose.dev.yml similarity index 51% rename from docker-compose.dev.yml rename to .devcontainer/docker-compose.dev.yml index b42dd7269..7b7cc8261 100644 --- a/docker-compose.dev.yml +++ b/.devcontainer/docker-compose.dev.yml @@ -1,4 +1,3 @@ -version: '3.8' services: devenv: build: @@ -16,21 +15,21 @@ services: image: ghcr.io/authelia/authelia restart: unless-stopped volumes: - - ./.devfiles/authelia/configuration.dev.yml:/config/configuration.yml - - ./.devfiles/authelia/users-database.yml:/config/users_database.yml + - ./devfiles/authelia/configuration.dev.yml:/config/configuration.yml + - ./devfiles/authelia/users-database.yml:/config/users_database.yml nginx: image: lscr.io/linuxserver/nginx restart: unless-stopped - ports: - - "8080:8080" - - "8081:8081" - - "8082:8082" + #ports: + # - "8080:8080" + # - "8081:8081" + # - "8082:8082" external_links: - "auth:auth.tira.local" - "devenv:www.tira.local" volumes: - - ./.devfiles/nginx/tira.conf:/config/nginx/site-confs/tira.conf - - ./.devfiles/nginx/tira-backend.conf:/config/nginx/site-confs/tira-backend.conf - - ./.devfiles/nginx/auth.conf:/config/nginx/site-confs/auth.conf - - ./.devfiles/nginx/snippets/:/config/nginx/snippets/ - - ./.devfiles/nginx/certs/:/etc/nginx/certs/ + - ./devfiles/nginx/tira.conf:/config/nginx/site-confs/tira.conf + - ./devfiles/nginx/tira-backend.conf:/config/nginx/site-confs/tira-backend.conf + - ./devfiles/nginx/auth.conf:/config/nginx/site-confs/auth.conf + - ./devfiles/nginx/snippets/:/config/nginx/snippets/ + - ./devfiles/nginx/certs/:/etc/nginx/certs/ diff --git a/.devcontainer/docker-compose.yml b/.devcontainer/docker-compose.yml index 6aa2b5fc3..829d5f3a4 100644 --- a/.devcontainer/docker-compose.yml +++ b/.devcontainer/docker-compose.yml @@ -1,4 +1,3 @@ -version: '3.8' services: # Update this to the name of the service you want to work with in your docker-compose.yml file devenv: @@ -13,7 +12,7 @@ services: volumes: # Update this to wherever you want VS Code to mount the folder of your project - - ..:/workspaces:cached + - ..:/workspaces/tira:cached # Uncomment the next four lines if you will use a ptrace-based debugger like C++, Go, and Rust. 
# cap_add: diff --git a/.github/workflows/run-all-tests.yml b/.github/workflows/run-all-tests.yml deleted file mode 100644 index 2c979eeb8..000000000 --- a/.github/workflows/run-all-tests.yml +++ /dev/null @@ -1,42 +0,0 @@ -name: Unit Tests - -on: [push] - -jobs: - backend-tests: - runs-on: ubuntu-latest - timeout-minutes: 15 - strategy: - matrix: - python-version: ["3.9", "3.10"] - - steps: - - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python-version }} - - name: Run backend tests - working-directory: ${{github.workspace}}/application - run: | - # Create a dummy DISRAPTOR_API_KEY - sudo bash -c 'mkdir -p "/etc/discourse/" && echo "I am so secret" > "/etc/discourse/client-api-key"' - pip3 install -r requirements.txt - make setup - make tests - - frontend-tests: - runs-on: ubuntu-latest - timeout-minutes: 15 - steps: - - uses: actions/checkout@v4 - - name: Setup Node.js - uses: actions/setup-node@v4 - with: - node-version: 18.x - - name: Install dependencies - working-directory: ${{github.workspace}}/frontend - run: yarn --frozen-lockfile - - name: Run frontend tests - working-directory: ${{github.workspace}}/frontend - run: yarn test diff --git a/.github/workflows/test-python-client-on-many-python-versions.yml b/.github/workflows/test-python-client-on-many-python-versions.yml deleted file mode 100644 index 7f61cb613..000000000 --- a/.github/workflows/test-python-client-on-many-python-versions.yml +++ /dev/null @@ -1,27 +0,0 @@ -name: Test Python Client on Many Python Versions -on: [push] - -jobs: - image: - runs-on: ubuntu-latest - timeout-minutes: 15 - strategy: - matrix: - python-version: ["3.8", "3.9", "3.10", "3.11"] - steps: - - name: Checkout - uses: actions/checkout@v4 - - name: Set up QEMU - uses: docker/setup-qemu-action@v2 - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 - - name: Install Dependencies - working-directory: ${{github.workspace}}/python-client - run: | - sudo apt-get install -y openjdk-11-jdk - pip3 install .[test,dev] - - name: Running Tests - working-directory: ${{github.workspace}}/python-client - run: | - echo running on branch ${GITHUB_REF##*/} - pytest diff --git a/.github/workflows/test-python-client.yml b/.github/workflows/test-python-client.yml deleted file mode 100644 index e122df229..000000000 --- a/.github/workflows/test-python-client.yml +++ /dev/null @@ -1,20 +0,0 @@ -name: Test Python Client -on: [push] - -jobs: - image: - runs-on: ubuntu-latest - timeout-minutes: 15 - steps: - - name: Checkout - uses: actions/checkout@v3 - - name: Set up QEMU - uses: docker/setup-qemu-action@v2 - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 - - name: Build Image - run: | - cd python-client - echo running on branch ${GITHUB_REF##*/} - make run-tests - diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml new file mode 100644 index 000000000..b3388ce8a --- /dev/null +++ b/.github/workflows/tests.yml @@ -0,0 +1,75 @@ +name: Unit Tests + +on: [push] + +jobs: + backend-tests: + runs-on: ubuntu-latest + timeout-minutes: 15 + strategy: + matrix: + python-version: ["3.9", "3.10"] + steps: + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + - name: Install TIRA Python Client from Repo + working-directory: ${{github.workspace}}/python-client + run: | + # Install tira from the repository since the 
pip version may not be up-to-date enough. + # The install must be editable (-e) since importing from tira fails otherwise + pip3 install -e .[dev,test] + - name: Install dependencies + working-directory: ${{github.workspace}}/application + run: | + # Create a dummy DISRAPTOR_API_KEY + sudo bash -c 'mkdir -p "/etc/discourse/" && echo "I am so secret" > "/etc/discourse/client-api-key"' + pip3 install -e .[dev,test] + make setup + - name: Run backend tests + working-directory: ${{github.workspace}}/application/test + run: pytest + + frontend-tests: + runs-on: ubuntu-latest + timeout-minutes: 15 + steps: + - uses: actions/checkout@v4 + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: 20.x + - name: Install dependencies + working-directory: ${{github.workspace}}/frontend + run: yarn --frozen-lockfile + - name: Run frontend tests + working-directory: ${{github.workspace}}/frontend + run: yarn test + + python-client-test: + runs-on: ubuntu-latest + timeout-minutes: 15 + strategy: + matrix: + python-version: ["3.8", "3.9", "3.10", "3.11"] + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + - name: Install Dependencies + working-directory: ${{github.workspace}}/python-client + run: | + sudo apt-get install -y openjdk-11-jdk + pip3 install .[test,dev] + - name: Running Tests + working-directory: ${{github.workspace}}/python-client + run: | + echo running on branch ${GITHUB_REF##*/} + pytest diff --git a/Makefile b/Makefile deleted file mode 100644 index 88a6672e3..000000000 --- a/Makefile +++ /dev/null @@ -1,31 +0,0 @@ -.PHONY: help setup run-develop build-docker clean - -.DEFAULT: help -help: - @echo "make setup" - @echo " setup your environment" - @echo "make run-develop" - @echo " run the tira server" - @echo "make tests" - @echo " run all tests (automatically done in Github Actions on each commit)" - @echo "make vite-build" - @echo " build and test the frontnend client code" - @echo "make clean" - @echo " clean the environment" - - -setup: - @cd application && make setup - -run-develop: - @cd application && make run-develop - -tests: - @cd application && make tests - -vite-build: - @cd application && make vite-build - -clean: - @cd application && make clean - diff --git a/README.md b/README.md index a1b42c73b..b0a329d5f 100644 --- a/README.md +++ b/README.md @@ -12,10 +12,10 @@ Current Release - Deployment + Deployment - - Tests + + Tests Linters diff --git a/application/.vscode/settings.json b/application/.vscode/settings.json new file mode 100644 index 000000000..7020c77da --- /dev/null +++ b/application/.vscode/settings.json @@ -0,0 +1,6 @@ +{ + "python.testing.pytestArgs": [ ], + "python.testing.unittestEnabled": false, + "python.testing.pytestEnabled": true, + "python.testing.cwd": "${workspaceFolder}/test" +} \ No newline at end of file diff --git a/application/Makefile b/application/Makefile index b307aa57d..b82a329a7 100644 --- a/application/Makefile +++ b/application/Makefile @@ -13,8 +13,6 @@ help: @echo " run the tira server" @echo "make tests" @echo " run all tests (automatically done in Github Actions on each commit)" - @echo "make vite-build" - @echo " build and test the frontnend client code" @echo "make build-docker" @echo " build the docker containers for deployment" @echo "make clean" @@ -31,9 +29,6 @@ setup: python3 src/manage.py migrate
tira python3 src/manage.py index_model -tests: - ./test/run_all_tests.sh - run-develop: python3 src/manage.py makemigrations \ && python3 src/manage.py migrate --fake \ diff --git a/application/README.md b/application/README.md index f8d37ce8d..84411af08 100644 --- a/application/README.md +++ b/application/README.md @@ -25,9 +25,6 @@ Run `make` to get an overview of all commands that will setup a self-contained t 4. Optionally: Change the configuration (the settings used for the development setup are: `tira/application/config/settings-dev.yml`) -## Frontend Development - -Build the frontend code via `make vite-build` ## Docker diff --git a/application/pyproject.toml b/application/pyproject.toml index 042a70457..30388f6b4 100644 --- a/application/pyproject.toml +++ b/application/pyproject.toml @@ -23,4 +23,12 @@ install_types = true exclude = [ "^src/tira/proto/.*\\.py$", "^src/tira/migrations/.*\\.py$", -] \ No newline at end of file +] + +[tool.pytest.ini_options] +DJANGO_SETTINGS_MODULE = "settings_test" +pythonpath = ["./src", "./test"] +python_files = "test_*.py" + +[tool.pytest_env] +HF_HOME = "./tira-root/huggingface" \ No newline at end of file diff --git a/application/requirements.txt b/application/requirements.txt deleted file mode 100644 index 06db2ea7a..000000000 --- a/application/requirements.txt +++ /dev/null @@ -1,28 +0,0 @@ -grpcio>=1.53.2 -# grpcio-tools==1.36.1 # still needed? -protobuf<4.0dev -Django -pyyaml -requests -randomname -tqdm -mysqlclient -python-gitlab -GitPython -python-slugify -ir-datasets -git+https://github.com/mam10eks/diffir -pandas -markdown -PyGithub==1.59.1 -django-extensions -discourse-client-in-disraptor==0.0.8 -tira>=0.0.97 -huggingface-hub - -# Test & Dev dependencies: -approvaltests==7.3.0 -parameterized -mockito -coverage -coverage-badge \ No newline at end of file diff --git a/application/setup.cfg b/application/setup.cfg index 878dac2f8..3be880370 100644 --- a/application/setup.cfg +++ b/application/setup.cfg @@ -1,3 +1,50 @@ + +[options] +python_requires = >=3.9 +include_package_data = True +packages = find: +install_requires = + grpcio>=1.53.2 + # grpcio-tools==1.36.1 # still needed? 
+ protobuf<4.0dev + Django + pyyaml + requests + randomname + tqdm + mysqlclient + python-gitlab + GitPython + python-slugify + ir-datasets + diffir@git+https://github.com/mam10eks/diffir + pandas + markdown + PyGithub==1.59.1 + django-extensions + discourse-client-in-disraptor==0.0.8 + tira>=0.0.97 + huggingface-hub + djangorestframework==3.15.1 + django-filter==24.2 + djangorestframework-jsonapi==7.0.0 + +[options.extras_require] +test = + mockito + parameterized + approvaltests==7.3.0 + pytest-django + pytest-env==1.1.3 +dev = + coverage + coverage-badge + black + flake8 + isort + mypy + + [flake8] max-line-length = 120 extend-ignore = E203 diff --git a/application/src/django_admin/settings.py b/application/src/django_admin/settings.py index b656f9503..cb84e7347 100644 --- a/application/src/django_admin/settings.py +++ b/application/src/django_admin/settings.py @@ -70,12 +70,13 @@ INSTALLED_APPS = [ "tira.apps.TiraConfig", - "django.contrib.admin", "django.contrib.auth", "django.contrib.contenttypes", "django.contrib.sessions", "django.contrib.messages", - "django.contrib.staticfiles", + "django_filters", + "rest_framework", + "rest_framework_json_api", ] MIDDLEWARE = [ @@ -88,6 +89,11 @@ "django.middleware.clickjacking.XFrameOptionsMiddleware", ] +REST_FRAMEWORK = { + "DEFAULT_AUTHENTICATION_CLASSES": ("tira.authentication.TrustedHeaderAuthentication",), + "DEFAULT_FILTER_BACKENDS": ("rest_framework_json_api.django_filters.DjangoFilterBackend",), +} + ROOT_URLCONF = "django_admin.urls" TEMPLATES = [ @@ -380,15 +386,6 @@ def logger_config(log_dir: Path): USE_TZ = True -# Static files (CSS, JavaScript, Images) -# https://docs.djangoproject.com/en/3.1/howto/static-files/ - -STATIC_URL = "/public/" - -STATICFILES_DIRS = [BASE_DIR / "tira/static/"] - -STATIC_ROOT = "/var/www/public" - DISCOURSE_API_URL = "https://www.tira.io" PUBLIC_TRAINING_DATA = set(["jena-topics-20231026-test", "leipzig-topics-20231025-test"]) diff --git a/application/src/django_admin/urls.py b/application/src/django_admin/urls.py index a4fb8f3c6..fb5bb717f 100644 --- a/application/src/django_admin/urls.py +++ b/application/src/django_admin/urls.py @@ -14,12 +14,8 @@ 2. 
Add a URL to urlpatterns: path('blog/', include('blog.urls')) """ -from django.conf import settings -from django.conf.urls.static import static -from django.contrib import admin from django.urls import include, path urlpatterns = [ - path("admin/", admin.site.urls), path("", include("tira.urls")), -] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) +] diff --git a/application/src/tira/authentication.py b/application/src/tira/authentication.py index 36c8a51b5..7118380ca 100644 --- a/application/src/tira/authentication.py +++ b/application/src/tira/authentication.py @@ -475,3 +475,72 @@ def user_is_organizer_for_endpoint( auth = Authentication(authentication_source=settings.DEPLOYMENT) + + +""" +Trusted Header Authentication implementation to integrate with Django +""" + +from typing import NamedTuple + +from django.contrib.auth.models import AnonymousUser +from rest_framework import authentication, exceptions + +_DISRAPTOR_APP_SECRET_KEY = os.getenv("DISRAPTOR_APP_SECRET_KEY") + + +class User(NamedTuple): + username: str + is_staff: bool + + +class TiraGuest(AnonymousUser): + def __init__(self) -> None: + super().__init__() + self.username = "guest" + self._groups: list[str] = [] + self.is_staff = False + + def __str__(self) -> str: + return self.username + + @property + def is_anonymous(self): + return True + + @property + def is_authenticated(self): + return False + + +class TiraUser(AnonymousUser): + def __init__(self, username: str, groups: list[str]) -> None: + super().__init__() + self.username = username + self._groups = groups + self.is_staff = "admins" in groups or "tira_reviewer" in groups + + def __str__(self) -> str: + return self.username + + @property + def is_anonymous(self): + return False + + @property + def is_authenticated(self): + return True + + +class TrustedHeaderAuthentication(authentication.BaseAuthentication): + + def authenticate(self, request) -> tuple[User, None]: + if not request.headers.get("X-Disraptor-App-Secret-Key", None) == _DISRAPTOR_APP_SECRET_KEY: + raise exceptions.AuthenticationFailed("Access forbidden.") + username = request.headers.get("X-Disraptor-User") + groups = request.headers.get("X-Disraptor-Groups") + grouplist = [] if not groups else groups.split(",") + if not username: + return (TiraGuest(), None) + + return (TiraUser(username, grouplist), None) diff --git a/application/src/tira/data/HybridDatabase.py b/application/src/tira/data/HybridDatabase.py index 0df26243b..404d5a643 100644 --- a/application/src/tira/data/HybridDatabase.py +++ b/application/src/tira/data/HybridDatabase.py @@ -1568,7 +1568,7 @@ def add_vm(self, vm_id, user_name, initial_user_password, ip, host, ssh, rdp): vm_id=vm_id, user_password=initial_user_password, roles="user", host=host, ip=ip, ssh=ssh, rdp=rdp ) except IntegrityError as e: - logger.exception(f"Failed to add new vm {vm_id} with ", e) + logger.exception(f"Failed to add new vm {vm_id} with ", exc_info=e) raise TiraModelIntegrityError(e) else: raise TiraModelWriteError(f"Failed to write VM {vm_id}") @@ -2264,7 +2264,7 @@ def update(x, y): self._save_run(dataset_id, vm_id, run_id, run) except Exception as e: - raise TiraModelWriteError(f"Exception while saving run ({dataset_id}, {vm_id}, {run_id})", e) + raise TiraModelWriteError(f"Exception while saving run ({dataset_id}, {vm_id}, {run_id})") from e def _fdb_edit_task( self, @@ -2529,7 +2529,7 @@ def delete_dataset(self, dataset_id): # self._fdb_delete_evaluator_from_vm(vm_id, evaluator_id) # except AttributeError as e: # logger.exception(f"Exception deleting
evaluator while deleting dataset {dataset_id}. " - # f"Maybe It never existed?", e) + # f"Maybe It never existed?", exc_info=e) # self._fdb_delete_dataset_from_task(task_id, dataset_id) # self._fdb_delete_dataset(task_id, dataset_id) # ds.delete() diff --git a/application/src/tira/data/data.py b/application/src/tira/data/data.py index 4a9e13355..b63dadf4a 100644 --- a/application/src/tira/data/data.py +++ b/application/src/tira/data/data.py @@ -363,7 +363,7 @@ def parse_run(runs_dir_path, dataset_id, vm_id, run_id): except modeldb.VirtualMachine.DoesNotExist as e: # If the vm was deleted but runs still exist, we land here. We skip indexing these runs. msg = f"Skip run {run_id}: VM {vm_id} does not exist" - logger.exception(msg, e) + logger.exception(msg, exc_info=e) return msg # Error Correction: Skip runs where Dataset no not exist anymore @@ -372,7 +372,7 @@ def parse_run(runs_dir_path, dataset_id, vm_id, run_id): except modeldb.Dataset.DoesNotExist as e: # If the dataset was deleted, but there are still runs left. msg = f"Skip run {run_id}: Dataset {run_proto.inputDataset} does not exist {e}" - logger.exception(msg, e) + logger.exception(msg, exc_info=e) return msg # Error Correction. If run files dont add a task_id (which is optional), we use the default task of the dataset @@ -388,7 +388,7 @@ def parse_run(runs_dir_path, dataset_id, vm_id, run_id): except Exception as e: msg = f"Skip run {run_id}: Creation of run had an unexpected ErrorRun: {run_proto}" - logger.exception(msg, e) + logger.exception(msg, exc_info=e) return msg # If this run has an input run (i.e. it's an evaluation) we set the reference here. diff --git a/application/src/tira/endpoints/admin_api.py b/application/src/tira/endpoints/admin_api.py index c3fc9b94e..7dcb039e0 100644 --- a/application/src/tira/endpoints/admin_api.py +++ b/application/src/tira/endpoints/admin_api.py @@ -28,7 +28,7 @@ def decorate(request, *args, **kwargs): msg = func(*args, **kwargs) return JsonResponse({"status": 0, "message": msg}, status=HTTPStatus.OK) except Exception as e: - logger.exception(f"{func.__name__} failed with {e}", e) + logger.exception(f"{func.__name__} failed with {e}", exc_info=e) return JsonResponse( {"status": 1, "message": f"{func.__name__} failed with {e}"}, status=HTTPStatus.INTERNAL_SERVER_ERROR, diff --git a/application/src/tira/endpoints/misc.py b/application/src/tira/endpoints/misc.py new file mode 100644 index 000000000..2b5435c6a --- /dev/null +++ b/application/src/tira/endpoints/misc.py @@ -0,0 +1,44 @@ +""" +This file contains miscellaneous and **unversioned** endpoints (e.g., the /health or /info). +""" + +from django.urls import path +from rest_framework import status +from rest_framework.decorators import api_view +from rest_framework.request import Request +from rest_framework.response import Response + +# TODO: this does not work so I hardcoded for now +# from tira import __version__ as tira_version + +tira_version = "0.0.136" +rest_api_version = "v1.0.0-draft" + + +@api_view(["GET"]) +def health_endpoint(request: Request) -> Response: + """ + The /health endpoint returns 2xx on success (currently 204 because we don't respond with any content). It can be + used to check if the REST-API is served. + """ + return Response(status=status.HTTP_204_NO_CONTENT) + + +@api_view(["GET"]) +def info_endpoint(request: Request) -> Response: + """ + The /info endpoint contains general information about the running server (e.g., the version of TIRA that is + running). 
Do not add any sensitive information to this endpoint as it is **public**! + """ + return Response( + { + "version": tira_version, + "restApiVersion": rest_api_version, + } + ) + + +endpoints = [ + path("health", health_endpoint), + path("info", info_endpoint), +] diff --git a/application/src/tira/endpoints/v1/__init__.py b/application/src/tira/endpoints/v1/__init__.py new file mode 100644 index 000000000..03673625b --- /dev/null +++ b/application/src/tira/endpoints/v1/__init__.py @@ -0,0 +1,17 @@ +from django.urls import include, path + +from ._datasets import endpoints as dataset_endpoints +from ._evaluations import endpoints as evaluation_endpoints +from ._organizers import endpoints as organizer_endpoints +from ._runs import endpoints as run_endpoints +from ._tasks import endpoints as task_endpoints +from ._user import endpoints as user_endpoints + +endpoints = [ + path("datasets/", include(dataset_endpoints)), + path("evaluations/", include(evaluation_endpoints)), + path("organizers/", include(organizer_endpoints)), + path("runs/", include(run_endpoints)), + path("tasks/", include(task_endpoints)), + path("user/", include(user_endpoints)), +] diff --git a/application/src/tira/endpoints/v1/_datasets.py b/application/src/tira/endpoints/v1/_datasets.py new file mode 100644 index 000000000..2d2fb8d50 --- /dev/null +++ b/application/src/tira/endpoints/v1/_datasets.py @@ -0,0 +1,43 @@ +from django.urls import path +from rest_framework import pagination +from rest_framework.permissions import IsAdminUser +from rest_framework.serializers import CharField, ModelSerializer +from rest_framework_json_api.views import ModelViewSet + +from ... import model as modeldb +from ._tasks import TaskSerializer + + +class DatasetSerializer(ModelSerializer): + id = CharField(source="dataset_id") + default_task = TaskSerializer() + + class Meta: + model = modeldb.Dataset + fields = [ + "id", + "default_task", + "display_name", + "evaluator", + "is_confidential", + "is_deprecated", + "data_server", + "released", + "default_upload_name", + "created", + "last_modified", + ] + + +class _DatasetView(ModelViewSet): + queryset = modeldb.Dataset.objects.all() + serializer_class = DatasetSerializer + pagination_class = pagination.CursorPagination + lookup_field = "dataset_id" + permission_classes = [IsAdminUser] # TODO: set to something sensible + + +endpoints = [ + path("", _DatasetView.as_view({"get": "list"})), + path("/", _DatasetView.as_view({"get": "retrieve", "delete": "destroy"})), +] diff --git a/application/src/tira/endpoints/v1/_evaluations.py b/application/src/tira/endpoints/v1/_evaluations.py new file mode 100644 index 000000000..08b280a73 --- /dev/null +++ b/application/src/tira/endpoints/v1/_evaluations.py @@ -0,0 +1,25 @@ +from django.urls import path +from rest_framework import pagination +from rest_framework.permissions import IsAdminUser +from rest_framework.serializers import ModelSerializer +from rest_framework_json_api.views import ModelViewSet + +from ... 
import model as modeldb + + +class EvaluationSerializer(ModelSerializer): + class Meta: + model = modeldb.Evaluation + fields = ["measure_key", "measure_value", "evaluator", "run"] + + +class _EvaluationView(ModelViewSet): + queryset = modeldb.Evaluation.objects.all() + serializer_class = EvaluationSerializer + pagination_class = pagination.CursorPagination + permission_classes = [IsAdminUser] # TODO: set to something sensible + + +endpoints = [ + path("", _EvaluationView.as_view({"get": "list"})), +] diff --git a/application/src/tira/endpoints/v1/_organizers.py b/application/src/tira/endpoints/v1/_organizers.py new file mode 100644 index 000000000..fdc100350 --- /dev/null +++ b/application/src/tira/endpoints/v1/_organizers.py @@ -0,0 +1,49 @@ +from django.urls import path +from rest_framework import pagination +from rest_framework.permissions import IsAdminUser +from rest_framework.serializers import CharField, ModelSerializer +from rest_framework_json_api.views import ModelViewSet + +from ... import model as modeldb + + +class OrganizerSerializer(ModelSerializer): + id = CharField(source="organizer_id") + website = CharField(source="web") + + class Meta: + model = modeldb.Organizer + fields = ["id", "name", "years", "website"] + + +# TODO: creating an organizer should behave like: admin_add_organizer +# TODO: editing an organizer should behave like: admin_edit_organizer + + +class _OrganizerView(ModelViewSet): + queryset = modeldb.Organizer.objects.all() + serializer_class = OrganizerSerializer + pagination_class = pagination.CursorPagination + permission_classes = [IsAdminUser] # TODO: set to something sensible + lookup_field = "organizer_id" + + filterset_fields = { + "name": ( + "exact", + "contains", + ), + "web": ( + "exact", + "contains", + ), + "years": ( + "exact", + "contains", + ), + } + + +endpoints = [ + path("", _OrganizerView.as_view({"get": "list", "post": "create"})), + path("/", _OrganizerView.as_view({"get": "retrieve", "delete": "destroy"})), +] diff --git a/application/src/tira/endpoints/v1/_runs.py b/application/src/tira/endpoints/v1/_runs.py new file mode 100644 index 000000000..cea1958c9 --- /dev/null +++ b/application/src/tira/endpoints/v1/_runs.py @@ -0,0 +1,62 @@ +from django.urls import path +from rest_framework import pagination +from rest_framework.generics import RetrieveAPIView +from rest_framework.permissions import IsAdminUser +from rest_framework.serializers import CharField, ModelSerializer, Serializer +from rest_framework_json_api.views import ModelViewSet + +from ... 
import model as modeldb + + +class _RunSerializer(Serializer): + id = CharField(source="run_id") + + class Meta: + model = modeldb.Run + fields = ["id", "downloadable", "deleted"] + + +class _ReviewSerializer(ModelSerializer): + run_id = CharField(source="run") + + class Meta: + model = modeldb.Review + fields = [ + "run_id", + "reviewer_id", + "review_date", + "no_errors", + "missing_output", + "extraneous_output", + "invalid_output", + "has_error_output", + "other_errors", + "comment", + "has_errors", + "has_warnings", + "has_no_errors", + "published", + "blinded", + ] + + +class _RunView(ModelViewSet): + queryset = modeldb.Run.objects.all() + serializer_class = _RunSerializer + pagination_class = pagination.CursorPagination + lookup_field = "run_id" + permission_classes = [IsAdminUser] # TODO: set to something sensible + + +class _ReviewDetailView(RetrieveAPIView): + queryset = modeldb.Review + serializer_class = _ReviewSerializer + lookup_field = "run" + permission_classes = [IsAdminUser] # TODO: set to something sensible + + +endpoints = [ + path("", _RunView.as_view({"get": "list"})), + path("/", _RunView.as_view({"get": "retrieve", "delete": "destroy"})), + path("/review", _ReviewDetailView.as_view()), +] diff --git a/application/src/tira/endpoints/v1/_tasks.py b/application/src/tira/endpoints/v1/_tasks.py new file mode 100644 index 000000000..5cfad5916 --- /dev/null +++ b/application/src/tira/endpoints/v1/_tasks.py @@ -0,0 +1,74 @@ +from django.urls import path +from rest_framework import pagination +from rest_framework.permissions import IsAdminUser +from rest_framework.serializers import CharField, ModelSerializer +from rest_framework_json_api.views import ModelViewSet + +from ... import model as modeldb +from ._evaluations import EvaluationSerializer +from ._organizers import OrganizerSerializer + + +class DatasetNameOnlySerializer(ModelSerializer): + id = CharField(source="dataset_id") + + class Meta: + model = modeldb.Dataset + fields = ["id", "display_name"] + + +class TaskSerializer(ModelSerializer): + id = CharField(source="task_id") + name = CharField(source="task_name") + description = CharField(source="task_description") + organizer = OrganizerSerializer() + website = CharField(source="web") + datasets = DatasetNameOnlySerializer(source="dataset_set", many=True, required=False, read_only=True) + + class Meta: + model = modeldb.Task + fields = "__all__" + + +class RegistrationSerializer(ModelSerializer): + + class Meta: + model = modeldb.Registration + fields = "__all__" + + +class _TaskView(ModelViewSet): + queryset = modeldb.Task.objects.all() + serializer_class = TaskSerializer + pagination_class = pagination.CursorPagination + permission_classes = [IsAdminUser] # TODO: set to something sensible + lookup_field = "task_id" + + +class _EvaluationView(ModelViewSet): + serializer_class = EvaluationSerializer + pagination_class = pagination.CursorPagination + permission_classes = [IsAdminUser] # TODO: set to something sensible + lookup_field = "task_id" + + def get_queryset(self): + return modeldb.Evaluation.objects.filter(run__task=self.kwargs[self.lookup_field]) + + +class _RegistrationView(ModelViewSet): + serializer_class = RegistrationSerializer + pagination_class = pagination.CursorPagination + permission_classes = [IsAdminUser] # TODO: set to something sensible + lookup_field = "task_id" + + def get_queryset(self): + return modeldb.Registration.objects.filter(registered_on_task=self.kwargs[self.lookup_field]) + + +endpoints = [ + path("", _TaskView.as_view({"get": 
"list", "post": "create"})), + path("/", _TaskView.as_view({"get": "retrieve", "delete": "destroy"})), + path("/evaluations", _EvaluationView.as_view({"get": "list"})), + path("/registrations", _RegistrationView.as_view({"get": "list", "post": "create"})), + # path("/submissions", _SubmissionView.as_view({'get': 'list', 'post': 'create'})), +] diff --git a/application/src/tira/endpoints/v1/_user.py b/application/src/tira/endpoints/v1/_user.py new file mode 100644 index 000000000..7c74362f5 --- /dev/null +++ b/application/src/tira/endpoints/v1/_user.py @@ -0,0 +1,14 @@ +from django.urls import path +from rest_framework.decorators import api_view +from rest_framework.request import Request +from rest_framework.response import Response + + +@api_view(["GET"]) +def user_endpoint(request: Request) -> Response: + return Response({"username": request.user.username, "groups": request.user.groups}) + + +endpoints = [ + path("", user_endpoint), +] diff --git a/application/src/tira/git_runner_integration.py b/application/src/tira/git_runner_integration.py index 5422093db..356cc9724 100644 --- a/application/src/tira/git_runner_integration.py +++ b/application/src/tira/git_runner_integration.py @@ -799,7 +799,7 @@ def get_manifest_of_docker_image_image_repository(self, repository_name, tag, ca "digest": image_metadata["config"]["digest"].split(":")[-1][:12], } except Exception as e: - logger.warn("Exception during loading of metadata for docker image", e) + logger.warn("Exception during loading of metadata for docker image", exc_info=e) ret = { "architecture": "Loading...", "created": "Loading...", @@ -1221,7 +1221,7 @@ def extract_job_configuration(self, gl_project, branch): if len(i.split("=")) == 2 } except Exception as e: - logger.warn(f'Could not extract job configuration on "{branch}".', e) + logger.warn(f'Could not extract job configuration on "{branch}".', exc_info=e) pass if ( diff --git a/application/src/tira/huggingface_hub_integration.py b/application/src/tira/huggingface_hub_integration.py index 92b35886f..4724d6eb7 100644 --- a/application/src/tira/huggingface_hub_integration.py +++ b/application/src/tira/huggingface_hub_integration.py @@ -1,35 +1,11 @@ -import os -import sys -from importlib.util import module_from_spec, spec_from_file_location -from types import ModuleType -from typing import Iterable +from typing import Iterable, Optional -from huggingface_hub import scan_cache_dir, snapshot_download +from huggingface_hub import HFCacheInfo, scan_cache_dir, snapshot_download +from huggingface_hub.constants import HF_HOME +import tira.io_utils as tira_cli_io_utils -def load_tira_cli_io_utils() -> ModuleType: - for p in sys.path: - p = str(os.path.abspath(p)) + "/" - if "-packages/" in p: - p = p.split("-packages/")[0] + "-packages/" - - if os.path.exists(f"{p}/tira/io_utils.py"): - tira_cli_io_utils_spec = spec_from_file_location("tira_cli.io_utils", f"{p}/tira/io_utils.py") - assert tira_cli_io_utils_spec is not None - assert tira_cli_io_utils_spec.loader is not None - tira_cli_io_utils = module_from_spec(tira_cli_io_utils_spec) - assert tira_cli_io_utils is not None - tira_cli_io_utils_spec.loader.exec_module(tira_cli_io_utils) - return tira_cli_io_utils - - raise ModuleNotFoundError() - - -tira_cli_io_utils = load_tira_cli_io_utils() - - -TIRA_HOST_HF_HOME = tira_cli_io_utils._default_hf_home_in_tira_host() -HF_CACHE = None +HF_CACHE: Optional[HFCacheInfo] = None def _hf_repos() -> dict[str, str]: @@ -56,9 +32,8 @@ def huggingface_model_mounts(models: Iterable[str]): else: raise 
Exception(f"Model {model} is not available in the Huggingface cache") - return {"MOUNT_HF_MODEL": " ".join(models), "HF_HOME": TIRA_HOST_HF_HOME, "HF_CACHE_SCAN": ret} + return {"MOUNT_HF_MODEL": " ".join(models), "HF_HOME": HF_HOME, "HF_CACHE_SCAN": ret} def snapshot_download_hf_model(model: str): - os.environ["HF_HOME"] = TIRA_HOST_HF_HOME snapshot_download(repo_id=model.replace("--", "/")) diff --git a/application/src/tira/management/commands/cache_daemon.py b/application/src/tira/management/commands/cache_daemon.py index b5b302c7a..21ff57155 100644 --- a/application/src/tira/management/commands/cache_daemon.py +++ b/application/src/tira/management/commands/cache_daemon.py @@ -59,7 +59,7 @@ def keep_running_softwares_fresh(self, sleep_time): ) except Exception as e: print(f"Exception during refreshing the repository {git_repository_id}: e") - logger.warn(f"Exception during refreshing the repository {git_repository_id}", e) + logger.warn(f"Exception during refreshing the repository {git_repository_id}", exc_info=e) continue time.sleep(0.1) diff --git a/application/src/tira/permissions.py b/application/src/tira/permissions.py new file mode 100644 index 000000000..f4f85a553 --- /dev/null +++ b/application/src/tira/permissions.py @@ -0,0 +1,20 @@ +from django.http import HttpRequest +from rest_framework.permissions import SAFE_METHODS, BasePermission + + +class ReadOnly(BasePermission): + def has_permission(self, request: HttpRequest, view): + return request.method in SAFE_METHODS + + +class IsOrganizer(BasePermission): + + def has_permission(self, request, view): + return True + + def has_object_permission(self, request: HttpRequest, view, obj) -> bool: + print(request) + print(view) + print(obj) + # TODO: implement + return False diff --git a/application/src/tira/urls.py b/application/src/tira/urls.py index 0f8b89e17..72cca7c10 100644 --- a/application/src/tira/urls.py +++ b/application/src/tira/urls.py @@ -1,9 +1,13 @@ -from django.urls import path +from typing import Union + +from django.urls import URLPattern, URLResolver, include, path from . import views from .endpoints import admin_api, data_api, diffir_api, organizer_api, serp_api, vm_api +from .endpoints.misc import endpoints as misc_endpoints +from .endpoints.v1 import endpoints as v1_endpoints -urlpatterns = [ +urlpatterns: list[Union[URLResolver, URLPattern]] = [ path( "task//user//dataset//download/.zip", views.download_rundir, @@ -241,6 +245,8 @@ serp_api.serp, name="serp", ), + *misc_endpoints, + path("v1/", include(v1_endpoints)), ] app_name = "tira" diff --git a/application/test/_utils/mixins.py b/application/test/_utils/mixins.py new file mode 100644 index 000000000..e60304b56 --- /dev/null +++ b/application/test/_utils/mixins.py @@ -0,0 +1,33 @@ +from abc import ABC, abstractmethod +from typing import Optional +from unittest.util import _common_shorten_repr + + +class StrAssertMixins(ABC): + """A mixin class for adding further string related assertions to a test case. + + The inheriting class must implement ``fail`` and ``_formatMessage`` methods, which behave similar to + `unittest.TestCase`. The most straight forward way is to use the mixin together with a `unittest.TestCase`: + + .. code:: python + + from unittest import TestCase + + class MyTest(TestCase, StrAssertMixins): + + def testcase(self): + self.assertStartsWith("foobar", "foo") # Success + self.assertStartsWith("foobar", "bar") # Fail + """ + + @abstractmethod + def _formatMessage(self, msg: Optional[str], standardMsg: str) -> str: ... 
+ + @abstractmethod + def fail(self, msg: Optional[str] = None) -> None: ... + + def assertStartsWith(self, string: str, prefix: str, msg: Optional[str] = None) -> None: + if not string.startswith(prefix): + standardMsg = "not %s.startswith(%s)" % _common_shorten_repr(string, prefix) + msg = self._formatMessage(msg, standardMsg) + self.fail(msg) diff --git a/application/test/api_access_matrix.py b/application/test/api_access_matrix.py index 5f735f469..9f85c0dee 100644 --- a/application/test/api_access_matrix.py +++ b/application/test/api_access_matrix.py @@ -727,7 +727,12 @@ ), route_to_test( url_pattern="task//vm//delete_software/docker/", - params={"task_id": "shared-task-1", "vm_id": "example_participant", "software_id": 0}, + params={ + "task_id": "shared-task-1", + "vm_id": "example_participant", + "software_id": 0, + "docker_software_id": "", + }, group_to_expected_status_code={ ADMIN: 200, GUEST: 302, @@ -738,7 +743,12 @@ ), route_to_test( url_pattern="task//vm//delete_software/docker/", - params={"task_id": "shared-task-1", "vm_id": PARTICIPANT.split("_")[-1], "software_id": 0}, + params={ + "task_id": "shared-task-1", + "vm_id": PARTICIPANT.split("_")[-1], + "software_id": 0, + "docker_software_id": "", + }, group_to_expected_status_code={ ADMIN: 200, GUEST: 302, @@ -749,7 +759,12 @@ ), route_to_test( url_pattern="task//vm//delete_software/docker/", - params={"task_id": "task-of-organizer-1", "vm_id": "example_participant", "software_id": 0}, + params={ + "task_id": "task-of-organizer-1", + "vm_id": "example_participant", + "software_id": 0, + "docker_software_id": "", + }, group_to_expected_status_code={ ADMIN: 200, GUEST: 302, @@ -1041,6 +1056,7 @@ "dataset_id": "does-not-exist", "docker_software_id": "does-not-exist", "rerank_dataset": "none", + "docker_resources": "", }, group_to_expected_status_code={ ADMIN: 200, @@ -1058,6 +1074,7 @@ "dataset_id": "does-not-exist", "docker_software_id": "does-not-exist", "rerank_dataset": "none", + "docker_resources": "", }, group_to_expected_status_code={ ADMIN: 200, @@ -1987,10 +2004,273 @@ ORGANIZER: 200, }, ), + route_to_test( + url_pattern="health", + params={}, + group_to_expected_status_code={ + GUEST: 204, + PARTICIPANT: 204, + ORGANIZER_WRONG_TASK: 204, + ORGANIZER: 204, + ADMIN: 204, + }, + ), + route_to_test( + url_pattern="info", + params={}, + group_to_expected_status_code={ + GUEST: 200, + PARTICIPANT: 200, + ORGANIZER_WRONG_TASK: 200, + ORGANIZER: 200, + ADMIN: 200, + }, + ), + # The following v1/ endpoints should be restricted to only allow admin-access for now + route_to_test( + url_pattern="v1/datasets/", + params={}, + group_to_expected_status_code={ + GUEST: 403, + PARTICIPANT: 403, + ORGANIZER_WRONG_TASK: 403, + ORGANIZER: 403, + ADMIN: 200, + }, + ), + route_to_test( + url_pattern="v1/datasets//", + params={"dataset_id": "i-do-not-exist"}, + method="GET", + group_to_expected_status_code={ + GUEST: 403, + PARTICIPANT: 403, + ORGANIZER_WRONG_TASK: 403, + ORGANIZER: 403, + ADMIN: 404, + }, + ), + route_to_test( + url_pattern="v1/datasets//", + params={"dataset_id": "i-do-not-exist"}, + method="DELETE", + group_to_expected_status_code={ + GUEST: 403, + PARTICIPANT: 403, + ORGANIZER_WRONG_TASK: 403, + ORGANIZER: 403, + ADMIN: 404, + }, + ), + route_to_test( + url_pattern="v1/evaluations/", + params={}, + method="GET", + group_to_expected_status_code={ + GUEST: 403, + PARTICIPANT: 403, + ORGANIZER_WRONG_TASK: 403, + ORGANIZER: 403, + ADMIN: 200, + }, + ), + route_to_test( + url_pattern="v1/organizers/", + params={}, + 
method="GET", + group_to_expected_status_code={ + GUEST: 403, + PARTICIPANT: 403, + ORGANIZER_WRONG_TASK: 403, + ORGANIZER: 403, + ADMIN: 200, + }, + ), + route_to_test( + url_pattern="v1/organizers/", + params={}, + method="POST", + group_to_expected_status_code={ + GUEST: 403, + PARTICIPANT: 403, + ORGANIZER_WRONG_TASK: 403, + ORGANIZER: 403, + # ADMIN: 200, # TODO: replace with correct code once the POST is properly implemented + }, + ), + route_to_test( + url_pattern="v1/organizers//", + params={"organizer_id": "does-not-exist"}, + method="GET", + group_to_expected_status_code={ + GUEST: 403, + PARTICIPANT: 403, + ORGANIZER_WRONG_TASK: 403, + ORGANIZER: 403, + ADMIN: 404, + }, + ), + route_to_test( + url_pattern="v1/organizers//", + params={"organizer_id": "does-not-exist"}, + method="DELETE", + group_to_expected_status_code={ + GUEST: 403, + PARTICIPANT: 403, + ORGANIZER_WRONG_TASK: 403, + ORGANIZER: 403, + ADMIN: 404, + }, + ), + route_to_test( + url_pattern="v1/runs/", + params={}, + method="GET", + group_to_expected_status_code={ + GUEST: 403, + PARTICIPANT: 403, + ORGANIZER_WRONG_TASK: 403, + ORGANIZER: 403, + ADMIN: 200, + }, + ), + route_to_test( + url_pattern="v1/runs//", + params={"run_id": "does-not-exist"}, + method="GET", + group_to_expected_status_code={ + GUEST: 403, + PARTICIPANT: 403, + ORGANIZER_WRONG_TASK: 403, + ORGANIZER: 403, + ADMIN: 404, + }, + ), + route_to_test( + url_pattern="v1/runs//", + params={"run_id": "does-not-exist"}, + method="DELETE", + group_to_expected_status_code={ + GUEST: 403, + PARTICIPANT: 403, + ORGANIZER_WRONG_TASK: 403, + ORGANIZER: 403, + ADMIN: 404, + }, + ), + route_to_test( + url_pattern="v1/runs//review", + params={"run": "does-not-exist"}, + method="GET", + group_to_expected_status_code={ + GUEST: 403, + PARTICIPANT: 403, + ORGANIZER_WRONG_TASK: 403, + ORGANIZER: 403, + ADMIN: 404, + }, + ), + route_to_test( + url_pattern="v1/tasks/", + params={}, + method="GET", + group_to_expected_status_code={ + GUEST: 403, + PARTICIPANT: 403, + ORGANIZER_WRONG_TASK: 403, + ORGANIZER: 403, + ADMIN: 200, + }, + ), + route_to_test( + url_pattern="v1/tasks/", + params={}, + method="POST", + group_to_expected_status_code={ + GUEST: 403, + PARTICIPANT: 403, + ORGANIZER_WRONG_TASK: 403, + ORGANIZER: 403, + # ADMIN: 200, # TODO: replace with correct code once the POST is properly implemented + }, + ), + route_to_test( + url_pattern="v1/tasks//", + params={"task_id": "does-not-exist"}, + method="GET", + group_to_expected_status_code={ + GUEST: 403, + PARTICIPANT: 403, + ORGANIZER_WRONG_TASK: 403, + ORGANIZER: 403, + ADMIN: 404, + }, + ), + route_to_test( + url_pattern="v1/tasks//", + params={"task_id": "does-not-exist"}, + method="DELETE", + group_to_expected_status_code={ + GUEST: 403, + PARTICIPANT: 403, + ORGANIZER_WRONG_TASK: 403, + ORGANIZER: 403, + ADMIN: 404, + }, + ), + route_to_test( + url_pattern="v1/tasks//evaluations", + params={"task_id": "does-not-exist"}, + method="GET", + group_to_expected_status_code={ + GUEST: 403, + PARTICIPANT: 403, + ORGANIZER_WRONG_TASK: 403, + ORGANIZER: 403, + # ADMIN: 404, # FIXME: this does not currently work + ADMIN: 200, + }, + ), + route_to_test( + url_pattern="v1/tasks//registrations", + params={"task_id": "does-not-exist"}, + method="GET", + group_to_expected_status_code={ + GUEST: 403, + PARTICIPANT: 403, + ORGANIZER_WRONG_TASK: 403, + ORGANIZER: 403, + # ADMIN: 404, # FIXME: this does not currently work + ADMIN: 200, + }, + ), + route_to_test( + url_pattern="v1/tasks//registrations", + params={"task_id": 
"does-not-exist"}, + method="POST", + group_to_expected_status_code={ + GUEST: 403, + PARTICIPANT: 403, + ORGANIZER_WRONG_TASK: 403, + ORGANIZER: 403, + # ADMIN: 404, # TODO: these should give 404 for non-existant tasks. That is not currently the case + }, + ), + route_to_test( + url_pattern="v1/user/", + params={}, + group_to_expected_status_code={ + GUEST: 200, + PARTICIPANT: 200, + ORGANIZER_WRONG_TASK: 200, + ORGANIZER: 200, + ADMIN: 200, + }, + ), ] -def access_matrix_for_user(user): +def access_matrix_for_user(user: str) -> list[tuple]: ret = [] for i in API_ACCESS_MATRIX: if user not in i[2]: diff --git a/application/test/diffir_tests/test_diffir_endpoint.py b/application/test/diffir_tests/test_diffir_endpoint.py index 13b64142a..90d5ebff8 100644 --- a/application/test/diffir_tests/test_diffir_endpoint.py +++ b/application/test/diffir_tests/test_diffir_endpoint.py @@ -1,3 +1,4 @@ +from _utils.mixins import StrAssertMixins from api_access_matrix import ADMIN from django.test import TestCase from utils_for_testing import method_for_url_pattern, mock_request, set_up_tira_environment @@ -6,14 +7,24 @@ diffir = method_for_url_pattern(url) -class TestDiffirEndpoint(TestCase): +class TestDiffirEndpoint(TestCase, StrAssertMixins): @classmethod def setUpClass(cls): set_up_tira_environment() def test_diffir_with_json(self): # Arrange - request = mock_request(ADMIN, url) + request = mock_request( + ADMIN, + url, + params={ + "task_id": "", # "t1", + "vm_id": "", # "example_participant", + "dataset_id": "", # "dataset-1", + "topk": "", # 10, + "run_id": "", # "run-3-example_participant", + }, + ) # Act actual = diffir( @@ -26,11 +37,21 @@ def test_diffir_with_json(self): ) # Assert - self.assertTrue(actual.content.decode("utf-8").startswith("")) + self.assertStartsWith(actual.content.decode("utf-8"), "") def test_diffir_with_json_gz(self): # Arrange - request = mock_request(ADMIN, url) + request = mock_request( + ADMIN, + url, + params={ + "task_id": "", # "t1", + "vm_id": "", # "example_participant", + "dataset_id": "", # "dataset-1", + "topk": "", # 10, + "run_id": "", # "run-5-example_participant", + }, + ) # Act actual = diffir( @@ -43,7 +64,7 @@ def test_diffir_with_json_gz(self): ) # Assert - self.assertTrue(actual.content.decode("utf-8").startswith("")) + self.assertStartsWith(actual.content.decode("utf-8"), "") @classmethod def tearDownClass(cls): diff --git a/application/test/evaluation_api_integration_tests/test_evaluation_results_for_single_dataset.py b/application/test/evaluation_api_integration_tests/test_evaluation_results_for_single_dataset.py index 478ae6841..3958a3375 100644 --- a/application/test/evaluation_api_integration_tests/test_evaluation_results_for_single_dataset.py +++ b/application/test/evaluation_api_integration_tests/test_evaluation_results_for_single_dataset.py @@ -20,7 +20,7 @@ def setUpClass(cls): def test_for_non_existing_task_and_dataset(self): # Arrange - request = mock_request(GUEST, url) + request = mock_request(GUEST, url, params={"task_id": "", "dataset_id": ""}) # Act actual = evaluations_function(request, task_id="does-not-exist", dataset_id="does-not-exist") @@ -30,7 +30,7 @@ def test_for_non_existing_task_and_dataset(self): def test_for_existing_task_and_dataset_with_few_evaluations(self): # Arrange - request = mock_request(GUEST, url) + request = mock_request(GUEST, url, params={"task_id": "", "dataset_id": ""}) # Act actual = evaluations_function(request, task_id="shared-task-1", dataset_id=dataset_1) @@ -40,7 +40,7 @@ def 
test_for_existing_task_and_dataset_with_few_evaluations(self): def test_for_existing_task_and_dataset_with_few_evaluations_including_blinded(self): # Arrange - request = mock_request(ADMIN, url) + request = mock_request(ADMIN, url, params={"task_id": "", "dataset_id": ""}) # Act actual = evaluations_function(request, task_id="shared-task-1", dataset_id=dataset_1) @@ -50,7 +50,7 @@ def test_for_existing_task_and_dataset_with_few_evaluations_including_blinded(se def test_for_existing_task_and_meta_dataset_with_few_evaluations(self): # Arrange - request = mock_request(GUEST, url) + request = mock_request(GUEST, url, params={"task_id": "", "dataset_id": ""}) # Act actual = evaluations_function(request, task_id="shared-task-1", dataset_id=dataset_meta) @@ -60,7 +60,7 @@ def test_for_existing_task_and_meta_dataset_with_few_evaluations(self): def test_for_existing_task_and_dataset_with_little_evaluations(self): # Arrange - request = mock_request(GUEST, url) + request = mock_request(GUEST, url, params={"task_id": "", "dataset_id": ""}) # Act actual = evaluations_function(request, task_id="shared-task-1", dataset_id=dataset_2) @@ -70,7 +70,7 @@ def test_for_existing_task_and_dataset_with_little_evaluations(self): def test_for_existing_task_and_dataset_with_little_evaluations_including_blinded(self): # Arrange - request = mock_request(ADMIN, url) + request = mock_request(ADMIN, url, params={"task_id": "", "dataset_id": ""}) # Act actual = evaluations_function(request, task_id="shared-task-1", dataset_id=dataset_2) diff --git a/application/test/evaluation_api_integration_tests/test_evaluations_for_vm.py b/application/test/evaluation_api_integration_tests/test_evaluations_for_vm.py index efeaee166..00339d85c 100644 --- a/application/test/evaluation_api_integration_tests/test_evaluations_for_vm.py +++ b/application/test/evaluation_api_integration_tests/test_evaluations_for_vm.py @@ -192,7 +192,9 @@ def setUpClass(cls): def test_existing_upload_of_for_user_with_all_published(self): # Arrange - request = mock_request("tira_vm_" + PARTICIPANT_1, url) + request = mock_request( + "tira_vm_" + PARTICIPANT_1, url, params={"task_id": "", "vm_id": ""} + ) global UPLOAD request.GET["upload_id"] = str(UPLOAD.id) @@ -204,7 +206,7 @@ def test_existing_upload_of_for_user_with_all_published(self): def test_for_non_existing_docker_software(self): # Arrange - request = mock_request(ADMIN, url) + request = mock_request(ADMIN, url, params={"task_id": "", "vm_id": ""}) request.GET["docker_software_id"] = "-1212" # Act @@ -215,7 +217,9 @@ def test_for_non_existing_docker_software(self): def test_existing_docker_software_of_wrong_user_01(self): # Arrange - request = mock_request("tira_vm_" + PARTICIPANT_2, url) + request = mock_request( + "tira_vm_" + PARTICIPANT_2, url, params={"task_id": "", "vm_id": ""} + ) request.GET["docker_software_id"] = SOFTWARE_IDS[SOFTWARE_PARTICIPANT_1] # Act @@ -226,7 +230,9 @@ def test_existing_docker_software_of_wrong_user_01(self): def test_existing_docker_software_of_wrong_user_02(self): # Arrange - request = mock_request("tira_vm_" + PARTICIPANT_1, url) + request = mock_request( + "tira_vm_" + PARTICIPANT_1, url, params={"task_id": "", "vm_id": ""} + ) request.GET["docker_software_id"] = SOFTWARE_IDS[SOFTWARE_PARTICIPANT_2] # Act @@ -237,7 +243,9 @@ def test_existing_docker_software_of_wrong_user_02(self): def test_existing_docker_software_of_for_user_with_all_published(self): # Arrange - request = mock_request("tira_vm_" + PARTICIPANT_1, url) + request = mock_request( + "tira_vm_" + 
PARTICIPANT_1, url, params={"task_id": "", "vm_id": ""} + ) request.GET["docker_software_id"] = SOFTWARE_IDS[SOFTWARE_PARTICIPANT_1] # Act @@ -248,7 +256,9 @@ def test_existing_docker_software_of_for_user_with_all_published(self): def test_existing_docker_software_of_for_user_with_one_published_on_train_data(self): # Arrange - request = mock_request("tira_vm_" + PARTICIPANT_2, url) + request = mock_request( + "tira_vm_" + PARTICIPANT_2, url, params={"task_id": "", "vm_id": ""} + ) request.GET["docker_software_id"] = SOFTWARE_IDS[SOFTWARE_PARTICIPANT_2] # Act @@ -259,7 +269,9 @@ def test_existing_docker_software_of_for_user_with_one_published_on_train_data(s def test_existing_docker_software_of_for_user_with_one_published_on_test_data(self): # Arrange - request = mock_request("tira_vm_" + PARTICIPANT_3, url) + request = mock_request( + "tira_vm_" + PARTICIPANT_3, url, params={"task_id": "", "vm_id": ""} + ) request.GET["docker_software_id"] = SOFTWARE_IDS[SOFTWARE_PARTICIPANT_3] # Act @@ -270,7 +282,9 @@ def test_existing_docker_software_of_for_user_with_one_published_on_test_data(se def test_existing_docker_software_of_for_user_with_none_published_on_test_data(self): # Arrange - request = mock_request("tira_vm_" + PARTICIPANT_4, url) + request = mock_request( + "tira_vm_" + PARTICIPANT_4, url, params={"task_id": "", "vm_id": ""} + ) request.GET["docker_software_id"] = SOFTWARE_IDS[SOFTWARE_PARTICIPANT_4] # Act @@ -281,7 +295,9 @@ def test_existing_docker_software_of_for_user_with_none_published_on_test_data(s def test_existing_docker_software_of_for_user_with_none_published_on_test_data_and_no_evaluations(self): # Arrange - request = mock_request("tira_vm_" + PARTICIPANT_5, url) + request = mock_request( + "tira_vm_" + PARTICIPANT_5, url, params={"task_id": "does-not-exist", "vm_id": "does-not-exist"} + ) request.GET["docker_software_id"] = SOFTWARE_IDS[SOFTWARE_PARTICIPANT_5] # Act diff --git a/application/test/hf_mount_model_tests/test_hf_mounts_are_parsed.py b/application/test/hf_mount_model_tests/test_hf_mounts_are_parsed.py index e171ef9a3..cf0640e8a 100644 --- a/application/test/hf_mount_model_tests/test_hf_mounts_are_parsed.py +++ b/application/test/hf_mount_model_tests/test_hf_mounts_are_parsed.py @@ -1,19 +1,14 @@ -import os import unittest -from tira.huggingface_hub_integration import TIRA_HOST_HF_HOME, _hf_repos, huggingface_model_mounts +from huggingface_hub import snapshot_download -os.environ["HF_HOME"] = TIRA_HOST_HF_HOME +from tira.huggingface_hub_integration import _hf_repos, huggingface_model_mounts class TestHfMountsAreParsed(unittest.TestCase): def fail_if_hf_is_not_installed(self): - os.environ["HF_HOME"] = TIRA_HOST_HF_HOME - from huggingface_hub import snapshot_download - snapshot_download(repo_id="prajjwal1/bert-tiny") - self.assertTrue(len(_hf_repos()) > 0) - del os.environ["HF_HOME"] + self.assertGreater(len(_hf_repos()), 0) def test_hf_is_installed(self): self.fail_if_hf_is_not_installed() @@ -40,8 +35,6 @@ def test_non_existing_hf_models_can_not_be_mounted(self): def test_existing_hf_model_can_be_mounted(self): self.fail_if_hf_is_not_installed() - os.environ["HF_HOME"] = TIRA_HOST_HF_HOME actual = huggingface_model_mounts(["prajjwal1/bert-tiny"]) - del os.environ["HF_HOME"] self.assertEqual("prajjwal1/bert-tiny", actual["MOUNT_HF_MODEL"]) diff --git a/application/test/settings_test.py b/application/test/settings_test.py index 8f16e96f4..1a880fecc 100644 --- a/application/test/settings_test.py +++ b/application/test/settings_test.py @@ -62,13 +62,14 @@ 
INSTALLED_APPS = [ "tira.apps.TiraConfig", - "django.contrib.admin", "django.contrib.auth", "django.contrib.contenttypes", "django.contrib.sessions", "django.contrib.messages", - "django.contrib.staticfiles", "django_extensions", + "django_filters", + "rest_framework", + "rest_framework_json_api", ] MIDDLEWARE = [ @@ -81,6 +82,11 @@ "django.middleware.clickjacking.XFrameOptionsMiddleware", ] +REST_FRAMEWORK = { + "DEFAULT_AUTHENTICATION_CLASSES": ("tira.authentication.TrustedHeaderAuthentication",), + "DEFAULT_FILTER_BACKENDS": ("rest_framework_json_api.django_filters.DjangoFilterBackend",), +} + ROOT_URLCONF = "django_admin.urls" TEMPLATES = [ @@ -298,15 +304,6 @@ def logger_config(log_dir: Path): USE_TZ = True -# Static files (CSS, JavaScript, Images) -# https://docs.djangoproject.com/en/3.1/howto/static-files/ - -STATIC_URL = "/public/" - -STATICFILES_DIRS = [BASE_DIR / "src" / "static/", BASE_DIR / "src" / "tira" / "static/"] - -STATIC_ROOT = "/var/www/public" - TIREX_COMPONENTS = yaml.load(open(BASE_DIR / "src" / "tirex-components.yml").read(), Loader=yaml.FullLoader) GIT_CI_AVAILABLE_RESOURCES = { diff --git a/application/test/task_endpoint_integration_tests/test_task_endpoint.py b/application/test/task_endpoint_integration_tests/test_task_endpoint.py index e1c8580c5..c9b461059 100644 --- a/application/test/task_endpoint_integration_tests/test_task_endpoint.py +++ b/application/test/task_endpoint_integration_tests/test_task_endpoint.py @@ -15,7 +15,7 @@ def setUpClass(cls): def test_response_for_non_existing_task(self): # Arrange - request = mock_request(GUEST, "api/task/") + request = mock_request(GUEST, "api/task/", params={"task_id": ""}) # Act actual = task_function(request, task_id="task-does-not-exist") @@ -25,7 +25,7 @@ def test_response_for_non_existing_task(self): def test_result_for_existing_task(self): # Arrange - request = mock_request(GUEST, "api/task/") + request = mock_request(GUEST, "api/task/", params={"task_id": ""}) # Act actual = task_function(request, task_id="task-of-organizer-1") @@ -35,7 +35,11 @@ def test_result_for_existing_task(self): def test_upload_submissions_for_non_existing_task(self): # Arrange - request = mock_request(ADMIN, "api/submissions-for-task///") + request = mock_request( + ADMIN, + "api/submissions-for-task///", + params={"task_id": "", "user_id": "", "submission_type": ""}, + ) # Act actual = submission_function( @@ -47,7 +51,11 @@ def test_upload_submissions_for_non_existing_task(self): def test_software_submissions_for_existing_task(self): # Arrange - request = mock_request(ADMIN, "api/submissions-for-task///") + request = mock_request( + ADMIN, + "api/submissions-for-task///", + params={"task_id": "", "user_id": "", "submission_type": ""}, + ) # Act actual = submission_function( @@ -59,7 +67,11 @@ def test_software_submissions_for_existing_task(self): def test_upload_submissions_for_existing_task(self): # Arrange - request = mock_request(ADMIN, "api/submissions-for-task///") + request = mock_request( + ADMIN, + "api/submissions-for-task///", + params={"task_id": "", "user_id": "", "submission_type": ""}, + ) # Act actual = submission_function( @@ -71,7 +83,11 @@ def test_upload_submissions_for_existing_task(self): def test_software_submissions_for_existing_task_and_user_without_software(self): # Arrange - request = mock_request(ADMIN, "api/submissions-for-task///") + request = mock_request( + ADMIN, + "api/submissions-for-task///", + params={"task_id": "", "user_id": "", "submission_type": ""}, + ) # Act actual = submission_function( 
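# Editor's note: a minimal, self-contained sketch of how the new `params` argument passed to
# `mock_request` throughout the tests above is expected to be resolved into a concrete request
# path. It mirrors the `__resolve_path` helper introduced further down in
# application/test/utils_for_testing.py; the URL pattern and task id used in the usage comment
# are hypothetical illustrations, not values taken from this change set.
import re
from typing import Any, Optional


def resolve_path(url_pattern: str, params: Optional[dict[str, Any]] = None) -> str:
    """Replace Django-style ``<type:name>`` placeholders with values from ``params``."""

    def _replace_with_value(match: re.Match) -> str:
        # Placeholders were found, so a params dictionary must have been supplied.
        assert params is not None, "placeholders present but no params given"
        return str(params[match.group(2)]) 

    # Group 1 is the converter (e.g. "str"), group 2 the parameter name; prepend "/" to mimic path_info.
    return re.sub(r"<(\w+):(\w+)>", _replace_with_value, f"/{url_pattern}")


# Usage sketch: resolve_path("api/task/<str:task_id>", {"task_id": "shared-task-1"})
# would return "/api/task/shared-task-1", which is then handed to DRF's APIRequestFactory
# to build the mocked request.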
diff --git a/application/test/test-coverage/.coverage b/application/test/test-coverage/.coverage index 4c3072b05..7b29d3316 100644 Binary files a/application/test/test-coverage/.coverage and b/application/test/test-coverage/.coverage differ diff --git a/application/test/test_accessibility_of_all_endpoints.py b/application/test/test_accessibility_of_all_endpoints.py new file mode 100644 index 000000000..1ba60366f --- /dev/null +++ b/application/test/test_accessibility_of_all_endpoints.py @@ -0,0 +1,170 @@ +from api_access_matrix import ADMIN, GUEST, ORGANIZER, ORGANIZER_WRONG_TASK, PARTICIPANT, access_matrix_for_user +from django.test import TestCase +from parameterized import parameterized # , parameterized_class +from utils_for_testing import ( + assert_all_url_patterns_are_tested, + execute_method_behind_url_and_return_status_code, + set_up_tira_environment, +) + +# TODO: I leave this here since it should work (but does not); at some point (something like) this should replace all +# the other classes below (TestAccessibilityOfEndpointsForAdminUser, ...) +""" +@parameterized_class(("user",), [(ADMIN,), (GUEST,), (ORGANIZER,), (ORGANIZER_WRONG_TASK,), (PARTICIPANT,)]) +class TestAccessibilityOfEndpoints(TestCase): + @classmethod + def setUpClass(cls): + set_up_tira_environment() + + def test_route(self): + tested_urls = [] + for ( + url_pattern, + method_bound_to_url_pattern, + request, + expected_status_code, + hide_stdout, + ) in access_matrix_for_user(self.user): + status_code = execute_method_behind_url_and_return_status_code( + method_bound_to_url_pattern, request, hide_stdout + ) + + self.assertEqual( + status_code, + expected_status_code, + f"{request['request'].method} {url_pattern} yielded {status_code} for user '{self.user}'; Expected: {expected_status_code}", + ) + + tested_urls.append(url_pattern) + + assert_all_url_patterns_are_tested(tested_urls) + + @classmethod + def tearDownClass(cls): + pass +""" + + +class TestAccessibilityOfEndpointsForAdminUser(TestCase): + @classmethod + def setUpClass(cls): + cls.tested_urls = [] + set_up_tira_environment() + + @parameterized.expand(access_matrix_for_user(ADMIN)) + def test_route(self, url_pattern, method_bound_to_url_pattern, request, expected_status_code, hide_stdout): + status_code = execute_method_behind_url_and_return_status_code( + method_bound_to_url_pattern, request, hide_stdout + ) + + self.assertEqual( + status_code, + expected_status_code, + f"{request['request'].method} {url_pattern} yielded {status_code}; Expected: {expected_status_code}", + ) + + self.tested_urls += [url_pattern] + + @classmethod + def tearDownClass(cls): + assert_all_url_patterns_are_tested(cls.tested_urls) + + +class TestAccessibilityOfEndpointsForGuestUser(TestCase): + @classmethod + def setUpClass(cls): + cls.tested_urls = [] + set_up_tira_environment() + + @parameterized.expand(access_matrix_for_user(GUEST)) + def test_route(self, url_pattern, method_bound_to_url_pattern, request, expected_status_code, hide_stdout): + status_code = execute_method_behind_url_and_return_status_code( + method_bound_to_url_pattern, request, hide_stdout + ) + + self.assertEqual( + status_code, + expected_status_code, + f"{request['request'].method} {url_pattern} yielded {status_code}; Expected: {expected_status_code}", + ) + + self.tested_urls += [url_pattern] + + @classmethod + def tearDownClass(cls): + assert_all_url_patterns_are_tested(cls.tested_urls) + + +class TestAccessibilityOfEndpointsForOrganizerWrongUser(TestCase): + @classmethod + def setUpClass(cls): + 
cls.tested_urls = [] + set_up_tira_environment() + + @parameterized.expand(access_matrix_for_user(ORGANIZER_WRONG_TASK)) + def test_route(self, url_pattern, method_bound_to_url_pattern, request, expected_status_code, hide_stdout): + status_code = execute_method_behind_url_and_return_status_code( + method_bound_to_url_pattern, request, hide_stdout + ) + + self.assertEqual( + status_code, + expected_status_code, + f"{request['request'].method} {url_pattern} yielded {status_code}; Expected: {expected_status_code}", + ) + + self.tested_urls += [url_pattern] + + @classmethod + def tearDownClass(cls): + assert_all_url_patterns_are_tested(cls.tested_urls) + + +class TestAccessibilityOfEndpointsForOrganizerUser(TestCase): + @classmethod + def setUpClass(cls): + cls.tested_urls = [] + set_up_tira_environment() + + @parameterized.expand(access_matrix_for_user(ORGANIZER)) + def test_route(self, url_pattern, method_bound_to_url_pattern, request, expected_status_code, hide_stdout): + status_code = execute_method_behind_url_and_return_status_code( + method_bound_to_url_pattern, request, hide_stdout + ) + + self.assertEqual( + status_code, + expected_status_code, + f"{request['request'].method} {url_pattern} yielded {status_code}; Expected: {expected_status_code}", + ) + + self.tested_urls += [url_pattern] + + @classmethod + def tearDownClass(cls): + assert_all_url_patterns_are_tested(cls.tested_urls) + + +class TestAccessibilityOfEndpointsForParticipantUser(TestCase): + @classmethod + def setUpClass(cls): + cls.tested_urls = [] + set_up_tira_environment() + + @parameterized.expand(access_matrix_for_user(PARTICIPANT)) + def test_route(self, url_pattern, method_bound_to_url_pattern, request, expected_status_code, hide_stdout): + status_code = execute_method_behind_url_and_return_status_code( + method_bound_to_url_pattern, request, hide_stdout + ) + + self.assertEqual( + status_code, + expected_status_code, + f"{request['request'].method} {url_pattern} yielded {status_code}; Expected: {expected_status_code}", + ) + + self.tested_urls += [url_pattern] + + @classmethod + def tearDownClass(cls): + assert_all_url_patterns_are_tested(cls.tested_urls) diff --git a/application/test/test_accessibility_of_all_endpoints_for_admin_user.py b/application/test/test_accessibility_of_all_endpoints_for_admin_user.py deleted file mode 100644 index 123ee5c6a..000000000 --- a/application/test/test_accessibility_of_all_endpoints_for_admin_user.py +++ /dev/null @@ -1,31 +0,0 @@ -from api_access_matrix import ADMIN, access_matrix_for_user -from django.test import TestCase -from parameterized import parameterized -from utils_for_testing import ( - assert_all_url_patterns_are_tested, - execute_method_behind_url_and_return_status_code, - set_up_tira_environment, -) - - -class TestAccessibilityOfEndpointsForAdminUser(TestCase): - @classmethod - def setUpClass(cls): - cls.tested_urls = [] - set_up_tira_environment() - - @parameterized.expand(access_matrix_for_user(ADMIN)) - def test_route(self, url_pattern, method_bound_to_url_pattern, request, expected_status_code, hide_stdout): - status_code = execute_method_behind_url_and_return_status_code( - method_bound_to_url_pattern, request, hide_stdout - ) - - assert ( - status_code == expected_status_code - ), f"Expected response for url_pattern {url_pattern} is {expected_status_code}. 
But I got {status_code}" - - self.tested_urls += [url_pattern] - - @classmethod - def tearDownClass(cls): - assert_all_url_patterns_are_tested(cls.tested_urls) diff --git a/application/test/test_accessibility_of_all_endpoints_for_guest_user.py b/application/test/test_accessibility_of_all_endpoints_for_guest_user.py deleted file mode 100644 index 68f779479..000000000 --- a/application/test/test_accessibility_of_all_endpoints_for_guest_user.py +++ /dev/null @@ -1,31 +0,0 @@ -from api_access_matrix import GUEST, access_matrix_for_user -from django.test import TestCase -from parameterized import parameterized -from utils_for_testing import ( - assert_all_url_patterns_are_tested, - execute_method_behind_url_and_return_status_code, - set_up_tira_environment, -) - - -class TestAccessibilityOfEndpointsForGuestUser(TestCase): - @classmethod - def setUpClass(cls): - cls.tested_urls = [] - set_up_tira_environment() - - @parameterized.expand(access_matrix_for_user(GUEST)) - def test_route(self, url_pattern, method_bound_to_url_pattern, request, expected_status_code, hide_stdout): - status_code = execute_method_behind_url_and_return_status_code( - method_bound_to_url_pattern, request, hide_stdout - ) - - assert ( - status_code == expected_status_code - ), f"Expected response for url_pattern {url_pattern} is {expected_status_code}. But I got {status_code}" - - self.tested_urls += [url_pattern] - - @classmethod - def tearDownClass(cls): - assert_all_url_patterns_are_tested(cls.tested_urls) diff --git a/application/test/test_accessibility_of_all_endpoints_for_organizer_of_wrong_task_user.py b/application/test/test_accessibility_of_all_endpoints_for_organizer_of_wrong_task_user.py deleted file mode 100644 index 55f81d210..000000000 --- a/application/test/test_accessibility_of_all_endpoints_for_organizer_of_wrong_task_user.py +++ /dev/null @@ -1,31 +0,0 @@ -from api_access_matrix import ORGANIZER_WRONG_TASK, access_matrix_for_user -from django.test import TestCase -from parameterized import parameterized -from utils_for_testing import ( - assert_all_url_patterns_are_tested, - execute_method_behind_url_and_return_status_code, - set_up_tira_environment, -) - - -class TestAccessibilityOfEndpointsForOrganizerWrongUser(TestCase): - @classmethod - def setUpClass(cls): - cls.tested_urls = [] - set_up_tira_environment() - - @parameterized.expand(access_matrix_for_user(ORGANIZER_WRONG_TASK)) - def test_route(self, url_pattern, method_bound_to_url_pattern, request, expected_status_code, hide_stdout): - status_code = execute_method_behind_url_and_return_status_code( - method_bound_to_url_pattern, request, hide_stdout - ) - - assert ( - status_code == expected_status_code - ), f"Expected response for url_pattern {url_pattern} is {expected_status_code}. But I got {status_code}." 
- - self.tested_urls += [url_pattern] - - @classmethod - def tearDownClass(cls): - assert_all_url_patterns_are_tested(cls.tested_urls) diff --git a/application/test/test_accessibility_of_all_endpoints_for_organizer_user.py b/application/test/test_accessibility_of_all_endpoints_for_organizer_user.py deleted file mode 100644 index 666ef4ef4..000000000 --- a/application/test/test_accessibility_of_all_endpoints_for_organizer_user.py +++ /dev/null @@ -1,31 +0,0 @@ -from api_access_matrix import ORGANIZER, access_matrix_for_user -from django.test import TestCase -from parameterized import parameterized -from utils_for_testing import ( - assert_all_url_patterns_are_tested, - execute_method_behind_url_and_return_status_code, - set_up_tira_environment, -) - - -class TestAccessibilityOfEndpointsForOrganizerUser(TestCase): - @classmethod - def setUpClass(cls): - cls.tested_urls = [] - set_up_tira_environment() - - @parameterized.expand(access_matrix_for_user(ORGANIZER)) - def test_route(self, url_pattern, method_bound_to_url_pattern, request, expected_status_code, hide_stdout): - status_code = execute_method_behind_url_and_return_status_code( - method_bound_to_url_pattern, request, hide_stdout - ) - - assert ( - status_code == expected_status_code - ), f"Expected response for url_pattern {url_pattern} is {expected_status_code}. But I got {status_code}" - - self.tested_urls += [url_pattern] - - @classmethod - def tearDownClass(cls): - assert_all_url_patterns_are_tested(cls.tested_urls) diff --git a/application/test/test_accessibility_of_all_endpoints_for_participant_user.py b/application/test/test_accessibility_of_all_endpoints_for_participant_user.py deleted file mode 100644 index 2461b35ab..000000000 --- a/application/test/test_accessibility_of_all_endpoints_for_participant_user.py +++ /dev/null @@ -1,32 +0,0 @@ -from api_access_matrix import PARTICIPANT, access_matrix_for_user -from django.test import TestCase -from parameterized import parameterized -from utils_for_testing import ( - assert_all_url_patterns_are_tested, - execute_method_behind_url_and_return_status_code, - set_up_tira_environment, -) - - -class TestAccessibilityOfEndpointsForParticipantUser(TestCase): - @classmethod - def setUpClass(cls): - cls.tested_urls = [] - set_up_tira_environment() - - @parameterized.expand(access_matrix_for_user(PARTICIPANT)) - def test_route(self, url_pattern, method_bound_to_url_pattern, request, expected_status_code, hide_stdout): - status_code = execute_method_behind_url_and_return_status_code( - method_bound_to_url_pattern, request, hide_stdout - ) - - assert status_code == expected_status_code, ( - f"Expected response for url_pattern {url_pattern} is {expected_status_code}. 
But I got {status_code} for" - f" {request}" - ) - - self.tested_urls += [url_pattern] - - @classmethod - def tearDownClass(cls): - assert_all_url_patterns_are_tested(cls.tested_urls) diff --git a/application/test/utils_for_testing.py b/application/test/utils_for_testing.py index dc5b71714..6ed4eb4f8 100644 --- a/application/test/utils_for_testing.py +++ b/application/test/utils_for_testing.py @@ -1,18 +1,26 @@ import gzip import io import os +import re import shutil from contextlib import redirect_stderr, redirect_stdout from copy import deepcopy from datetime import datetime from pathlib import Path +from typing import Any, Iterable, Optional, Union +from django.conf import settings from django.core.management import call_command -from mockito import mock +from django.http import HttpRequest +from django.http.request import QueryDict +from django.urls import URLPattern, URLResolver +from rest_framework.test import APIRequestFactory import tira.model as modeldb +from tira.authentication import TrustedHeaderAuthentication from tira.tira_model import model as tira_model -from tira.urls import urlpatterns + +auth_backend = TrustedHeaderAuthentication() # There must be a way to get this from rest_framework right? # Used for some tests now = datetime.now().strftime("%Y%m%d") @@ -290,62 +298,49 @@ def set_up_tira_environment(): tira_model.add_run(dataset_id="dataset-of-organizer", vm_id="example_participant", run_id="run-of-organizer") -def mock_request(groups, url_pattern, method="GET", body=None, params=None): - if "DISRAPTOR_APP_SECRET_KEY" not in os.environ: - os.environ["DISRAPTOR_APP_SECRET_KEY"] = "my-disraptor-key" - ret = mock() - ret.headers = { - "X-Disraptor-App-Secret-Key": "my-disraptor-key", - "X-Disraptor-User": "ignored-user.", - "X-Disraptor-Groups": groups, - } - ret.path_info = "/" + url_pattern - - ret.GET = {} - - if params and "organizer_id" in params and "" in ret.path_info: - ret.path_info = ret.path_info.replace("", params["organizer_id"]) - - if params and "dataset_id" in params and "" in ret.path_info: - ret.path_info = ret.path_info.replace("", str(params["dataset_id"])) +def __resolve_path(url_pattern: str, params: Optional[dict[str, Any]] = None) -> str: + """ + Replaces django template variables with their value from params - if params and "vm_id" in params and "" in ret.path_info: - ret.path_info = ret.path_info.replace("", params["vm_id"]) + >>> __resolve_path("v1/runs//review", {"run_id": "blah"}) + /v1/runs/blah/review + """ - if params and "run_id" in params and "" in ret.path_info: - ret.path_info = ret.path_info.replace("", params["run_id"]) + def _replace_with_value(match: re.Match) -> str: + assert params is not None, "Keys were present but no dictionary was given" + return str(params[match.group(2)]) - if params and "task_id" in params and "" in ret.path_info: - ret.path_info = ret.path_info.replace("", params["task_id"]) + return re.sub(r"<(\w+):(\w+)>", _replace_with_value, f"/{url_pattern}") - if params and "software_name" in params and "" in ret.path_info: - ret.path_info = ret.path_info.replace("", params["software_name"]) - ret.META = { +def mock_request( + groups: str, url_pattern: str, method="GET", body: Optional[dict] = None, params: Optional[dict] = None +) -> HttpRequest: + path = __resolve_path(url_pattern, params) + # Stuff prefixed with HTTP_ will be added to the headers and to META otherwise + headers = { + "HTTP_X-Disraptor-App-Secret-Key": os.getenv("DISRAPTOR_APP_SECRET_KEY"), + "HTTP_X-Disraptor-User": "ignored-user.", + 
"HTTP_X-Disraptor-Groups": groups, "CSRF_COOKIE": "aasa", } - ret.current_app = "tira" - if method: - ret.method = method - if body: - ret.body = body - + factory = APIRequestFactory() + ret = factory.generic(method=method, path=path, data=body if body is not None else "", **headers) + assert isinstance(ret, HttpRequest) + # These should be empty anyway from the code above but are immutable. Override to make them mutable + ret.GET = QueryDict("", mutable=True) + ret.POST = QueryDict("", mutable=True) return ret -def method_for_url_pattern(url_pattern): - method_bound_to_url_pattern = None - - for pattern in urlpatterns: - if str(url_pattern) == str(pattern.pattern): - method_bound_to_url_pattern = pattern.callback - - assert method_bound_to_url_pattern, f'No method is bound to pattern "{url_pattern}".' - - return method_bound_to_url_pattern +def method_for_url_pattern(url_pattern: str): + patterns = {f"{pre}{pat.pattern}": pat for pre, pat in get_django_url_patterns()} + return patterns[url_pattern].callback -def route_to_test(url_pattern, params, group_to_expected_status_code, method="GET", hide_stdout=False, body=None): +def route_to_test( + url_pattern, params, group_to_expected_status_code: dict[str, int], method="GET", hide_stdout=False, body=None +): metadata_for_groups = {} for group, expected_status_code in group_to_expected_status_code.items(): @@ -369,7 +364,63 @@ def execute_method_behind_url_and_return_status_code(method_bound_to_url_pattern return ret.status_code -def assert_all_url_patterns_are_tested(tested_url_patterns): - tested_url_patterns = set(tested_url_patterns) - for url_pattern in urlpatterns: - assert str(url_pattern.pattern) in tested_url_patterns, f'The pattern "{url_pattern.pattern}" is not tested.' +def __django_url_patterns(resolver: URLResolver, prefix: str = "") -> Iterable[tuple[str, URLPattern]]: + """Iterates all URLPatterns resolved by the provided ``resolver`` and their URLs. + + Args: + resolver (URLResolver): The resolver for which to fetch the urls and their associated ``URLPattern``. + prefix (str, optional): An optional prefix to prepend to all paths. Defaults to "". + + Raises: + TypeError: Raised if an unexpected datatype is found to be resolved by the ``URLResolver``. This error should + not be captured since it is an internal problem. + + Returns: + Iterable[tuple[str, URLPattern]]: An iterable of paths and the URLPattern they are resolved to. + """ + for p in resolver.url_patterns: + if isinstance(p, URLPattern): + yield prefix, p + elif isinstance(p, URLResolver): + yield from __django_url_patterns(p, f"{prefix}{p.pattern}") + else: + raise TypeError(f"Unexpected entry-type in urlpatterns for {p}") + + +def get_django_url_patterns( + urlpatterns: Optional[list[Union[URLResolver, URLPattern]]] = None +) -> Iterable[tuple[str, URLPattern]]: + """Returns an iterable of all configured django endpoints. + + Args: + urlpatterns (Optional[list[Union[URLResolver, URLPattern]]], optional): A list of the url patterns to extract + all configured endpoints on. If None, the endpoints that are configured for django will be used. Defaults + to None. + + Raises: + TypeError: Raised if an unexpected datatype is found to be resolved by the ``URLResolver``. This error should + not be captured since it is an internal problem. + + Returns: + Iterable[tuple[str, URLPattern]]: An iterable of paths and the URLPattern they are resolved to. 
+ """ + if urlpatterns is None: + urlconf = __import__(settings.ROOT_URLCONF, {}, {}, [""]) + urlpatterns = urlconf.urlpatterns + assert isinstance(urlpatterns, list) + + for p in urlpatterns: + if isinstance(p, URLPattern): + yield "", p + elif isinstance(p, URLResolver): + yield from __django_url_patterns(p, p.pattern) + else: + raise TypeError(f"Unexpected entry-type in urlpatterns for {p}") + + +def assert_all_url_patterns_are_tested(tested_url_patterns: Iterable[str]): + """ + Asserts that tested_url_patterns is identical or a superset to all the endpoints registered with django. + """ + untested = set(f"{pre}{pat.pattern}" for pre, pat in get_django_url_patterns()).difference(tested_url_patterns) + assert len(untested) == 0, f"{len(untested)} patterns are untested: {untested}; tested: {tested_url_patterns}" diff --git a/documentation/development/frontend/index.rst b/documentation/development/frontend/index.rst index 36c883907..60af05e44 100644 --- a/documentation/development/frontend/index.rst +++ b/documentation/development/frontend/index.rst @@ -1,24 +1,72 @@ Frontend Development ==================== -Getting Started ---------------- +.. hint:: This page generally describes **two ways** of achieving the same thing: Using VSCode and using the shell. For + general development, we **highly recommend** using the VSCode integration unless it does not work for you or a + shell is required. Simply set the tab to the version you will work with, they a synchronized through the magic of + technology. + + + +.. rubric:: Getting Started + To get started, please read and follow the instructions of the :ref:`DevEnvPage` section. -Code Testing ------------- -Open a shell within the ``frontend`` directory and run -.. code:: bash +.. tab-set:: + + .. tab-item:: VSCode + + .. _launchfrontend: + + .. dropdown:: :octicon:`rocket` Launching the Frontend + + .. todo:: TODO + + .. dropdown:: :octicon:`beaker` Code Testing + + .. todo:: TODO + + .. dropdown:: :fab:`hammer` Building the Static Frontend + + .. attention:: This step can only be performed via shell and is only really useful for deployment. If + you simply want to launch the frontend for debugging, have a look at + :ref:`Launching the Frontend `. - yarn test + .. tab-item:: Shell + + .. dropdown:: :octicon:`rocket` Launching the Frontend + + Open a shell within the ``frontend`` directory and run + + .. code:: bash + + yarn dev + + .. dropdown:: :octicon:`beaker` Code Testing + + Open a shell within the ``frontend`` directory and run + + .. code:: bash + + yarn test + + .. dropdown:: :fab:`hammer` Building the Static Frontend + + Open a shell within the ``frontend`` directory and either run + + .. code:: bash + + yarn build-light + + + +.. rubric:: Linting + +.. note:: We don't currently have any linters -Linting -------- -.. todo:: We don't currently have any linters -FAQ ---- +.. rubric:: Frequently Asked Questions -Yay, no questions yet. \ No newline at end of file +No questions yet :material-regular:`mood;1.5em;sd-text-success`. diff --git a/documentation/development/specification/rest-api.yml b/documentation/development/specification/rest-api.yml index 6fd0aedc0..0d1c3321b 100644 --- a/documentation/development/specification/rest-api.yml +++ b/documentation/development/specification/rest-api.yml @@ -1,11 +1,10 @@ -openapi: '3.0.2' +openapi: "3.0.2" info: title: TIRA API version: "1.0.0-draft" servers: - url: https://www.tira.io/api # - url: https://api.tira.io/ <-- maybe for the future? 
- - url: http://127.0.0.1:8080/api security: - bearerAuth: [] @@ -21,7 +20,7 @@ paths: security: - LegacyApiKeyAuth: [] responses: - '200': + "200": description: TODO tags: [legacy, "dead?"] /tira-admin/reload/datasets: @@ -34,7 +33,7 @@ paths: security: - LegacyApiKeyAuth: [] responses: - '200': + "200": description: TODO tags: [legacy, "dead?"] /tira-admin/reload/tasks: @@ -47,7 +46,7 @@ paths: security: - LegacyApiKeyAuth: [] responses: - '200': + "200": description: TODO tags: [legacy, "dead?"] /tira-admin/reload-data: @@ -60,7 +59,7 @@ paths: security: - LegacyApiKeyAuth: [] responses: - '200': + "200": description: TODO tags: [legacy, "dead?"] /tira-admin/reload-runs/{vmId}: @@ -73,7 +72,7 @@ paths: security: - LegacyApiKeyAuth: [] responses: - '200': + "200": description: TODO tags: [legacy, "dead?"] /tira-admin/create-vm: @@ -86,7 +85,7 @@ paths: security: - LegacyApiKeyAuth: [] responses: - '200': + "200": description: TODO tags: [legacy, "dead?"] /tira-admin/archive-vm: @@ -99,7 +98,7 @@ paths: security: - LegacyApiKeyAuth: [] responses: - '200': + "200": description: TODO tags: [legacy, "dead?"] /tira-admin/modify-vm: @@ -112,7 +111,7 @@ paths: security: - LegacyApiKeyAuth: [] responses: - '200': + "200": description: TODO tags: [legacy, "dead?"] /tira-admin/export-participants/{taskId}.csv: @@ -125,7 +124,7 @@ paths: security: - LegacyApiKeyAuth: [] responses: - '200': + "200": description: TODO tags: [legacy] /tira-admin/{organizerId}/create-task: @@ -138,7 +137,7 @@ paths: security: - LegacyApiKeyAuth: [] responses: - '200': + "200": description: TODO tags: [legacy] /tira-admin/edit-task/{taskId}: @@ -151,7 +150,7 @@ paths: security: - LegacyApiKeyAuth: [] responses: - '200': + "200": description: TODO tags: [legacy] /tira-admin/delete-task/{taskId}: @@ -164,7 +163,7 @@ paths: security: - LegacyApiKeyAuth: [] responses: - '200': + "200": description: TODO tags: [legacy] /tira-admin/add-dataset/{taskId}: @@ -177,7 +176,7 @@ paths: security: - LegacyApiKeyAuth: [] responses: - '200': + "200": description: TODO tags: [legacy] /tira-admin/upload-dataset/{taskId}/{datasetId}/{datasetType}: @@ -190,7 +189,7 @@ paths: security: - LegacyApiKeyAuth: [] responses: - '200': + "200": description: TODO tags: [legacy] /tira-admin/imports-irds-dataset/{taskId}: @@ -203,7 +202,7 @@ paths: security: - LegacyApiKeyAuth: [] responses: - '200': + "200": description: TODO tags: [legacy] /tira-admin/edit-dataset/{datasetId}: @@ -216,7 +215,7 @@ paths: security: - LegacyApiKeyAuth: [] responses: - '200': + "200": description: TODO tags: [legacy] /tira-admin/delete-dataset/{datasetId}: @@ -229,7 +228,7 @@ paths: security: - LegacyApiKeyAuth: [] responses: - '200': + "200": description: TODO tags: [legacy] /tira-admin/add-organizer/{organizerId}: @@ -242,7 +241,7 @@ paths: security: - LegacyApiKeyAuth: [] responses: - '200': + "200": description: TODO tags: [legacy] /tira-admin/edit-organizer/{organizerId}: @@ -255,7 +254,7 @@ paths: security: - LegacyApiKeyAuth: [] responses: - '200': + "200": description: TODO tags: [legacy] /tira-admin/edit-review/{datasetId}/{vmId}/{runId}: @@ -268,7 +267,7 @@ paths: security: - LegacyApiKeyAuth: [] responses: - '200': + "200": description: TODO tags: [legacy] /tira-admin/create-group/{vmId}: @@ -281,7 +280,7 @@ paths: security: - LegacyApiKeyAuth: [] responses: - '200': + "200": description: TODO tags: [legacy, "dead?"] @@ -295,7 +294,7 @@ paths: security: - LegacyApiKeyAuth: [] responses: - '200': + "200": description: TODO tags: [legacy] 
/blind/{vmId}/{datasetId}/{runId}/{value}: @@ -308,7 +307,7 @@ paths: security: - LegacyApiKeyAuth: [] responses: - '200': + "200": description: TODO tags: [legacy] @@ -323,7 +322,7 @@ paths: deprecated: true security: [] responses: - '200': + "200": description: TODO tags: [legacy] /evaluations-of-vm/{taskId}/{vmId}: @@ -335,7 +334,7 @@ paths: security: - LegacyApiKeyAuth: [] responses: - '200': + "200": description: TODO tags: [legacy] /evaluation/{vmId}/{runId}: @@ -346,7 +345,7 @@ paths: security: - LegacyApiKeyAuth: [] responses: - '200': + "200": description: TODO tags: [legacy, "dead?"] /submissions/{taskId}/{datasetId}: @@ -355,7 +354,7 @@ paths: deprecated: true security: [] responses: - '200': + "200": description: TODO tags: [legacy] /docker-softwares-details/{vmId}/{dockerSoftwareId}: @@ -376,7 +375,7 @@ paths: security: - LegacyApiKeyAuth: [] responses: - '200': + "200": description: TODO tags: [legacy] /upload-group-details/{taskId}/{vmId}/{uploadId}: @@ -384,7 +383,7 @@ paths: security: - LegacyApiKeyAuth: [] responses: - '200': + "200": description: TODO tags: [legacy] /evaluations_of_run/{vmId}/{runId}: @@ -395,7 +394,7 @@ paths: security: - LegacyApiKeyAuth: [] responses: - '200': + "200": description: TODO tags: [legacy] /configuration-of-evaluation/{taskId}/{datasetId}: @@ -420,7 +419,7 @@ paths: deprecated: true security: [] responses: - '200': + "200": description: TODO content: application/json: @@ -462,7 +461,7 @@ paths: /ir_measures_evaluator.py --run ${inputRun}/run.txt --topics ${inputDataset}/queries.jsonl --qrels ${inputDataset}/qrels.txt --output_path ${outputDir} --measures "P@10" "nDCG@10" "MRR" - tags: [legacy,redundant] + tags: [legacy, redundant] /list-runs/{taskId}/{datasetId}/{vmId}/{softwareId}: get: summary: | @@ -472,7 +471,7 @@ paths: deprecated: true security: [] responses: - '200': + "200": description: TODO tags: [legacy] /ova-list: @@ -482,19 +481,9 @@ paths: security: - LegacyApiKeyAuth: [] responses: - '200': + "200": description: TODO tags: [legacy, "dead?"] - /host-list: - get: - summary: Deprecated, use /v1/hosts instead. - deprecated: true - security: - - LegacyApiKeyAuth: [] - responses: - '200': - description: TODO - tags: [legacy] /organizer-list: get: summary: Deprecated, use /v1/organizers instead. 
@@ -502,7 +491,7 @@ paths: security: - LegacyApiKeyAuth: [] responses: - '200': + "200": description: TODO tags: [legacy] /task-list: @@ -511,7 +500,7 @@ paths: deprecated: true security: [] responses: - '200': + "200": description: TODO content: application/json: @@ -533,7 +522,7 @@ paths: enum: - "guest" organizer_teams: - type: string # TODO: this is really odd that it is a string and not a list + type: string # TODO: this is really odd that it is a string and not a list task_list: type: array items: @@ -552,7 +541,7 @@ paths: deprecated: true security: [] responses: - '200': + "200": description: TODO content: application/json: @@ -574,17 +563,17 @@ paths: enum: - "guest" organizer_teams: - type: string # TODO: this is really odd that it is a string and not a list + type: string # TODO: this is really odd that it is a string and not a list task: $ref: "#/components/schemas/TaskLegacy" - '404': - $ref: "#/components/responses/UnauthorizedOrNotFoundError" + "404": + $ref: "#/components/responses/UnauthorizedOrNotFoundError" tags: [legacy] /registration_formular/{taskId}: get: security: [] responses: - '200': + "200": description: TODO tags: [legacy] /dataset/{datasetId}: @@ -593,7 +582,7 @@ paths: deprecated: true security: [] responses: - '200': + "200": description: TODO tags: [legacy] /datasets_by_task/{taskId}: @@ -602,7 +591,7 @@ paths: deprecated: true security: [] responses: - '200': + "200": description: TODO tags: [legacy] /organizer/{organizerId}: @@ -618,7 +607,7 @@ paths: type: string security: [] responses: - '200': + "200": description: TODO tags: [legacy] /role: @@ -627,12 +616,12 @@ paths: deprecated: true security: [] responses: - '200': + "200": description: TODO content: application/json: schema: - $ref: '#/components/schemas/Role' + $ref: "#/components/schemas/Role" tags: [legacy] /task/{taskId}/user/{userId}: get: @@ -641,36 +630,36 @@ paths: deprecated: true security: [] responses: - '200': + "200": description: TODO - '404': - $ref: "#/components/responses/UnauthorizedOrNotFoundError" + "404": + $ref: "#/components/responses/UnauthorizedOrNotFoundError" tags: [legacy] /task/{taskId}/user/{userId}/refresh-docker-images: get: security: [] responses: - '200': + "200": description: TODO - '404': - $ref: "#/components/responses/UnauthorizedOrNotFoundError" + "404": + $ref: "#/components/responses/UnauthorizedOrNotFoundError" tags: [legacy] /count-of-missing-reviews/{taskId}: get: security: - LegacyApiKeyAuth: [] responses: - '200': + "200": description: TODO tags: [legacy] /task/{taskId}/user/{userId}/software/running/{forceCacheRefresh}: get: security: [] responses: - '200': + "200": description: TODO - '404': - $ref: "#/components/responses/UnauthorizedOrNotFoundError" + "404": + $ref: "#/components/responses/UnauthorizedOrNotFoundError" tags: [legacy] /task/{taskId}/public-submissions: get: @@ -679,19 +668,19 @@ paths: deprecated: true security: [] responses: - '200': + "200": description: TODO - '404': - $ref: "#/components/responses/UnauthorizedOrNotFoundError" + "404": + $ref: "#/components/responses/UnauthorizedOrNotFoundError" tags: [legacy] /task/{taskId}/submission-details/{userId}/{software}: get: security: [] responses: - '200': + "200": description: TODO - '404': - $ref: "#/components/responses/UnauthorizedOrNotFoundError" + "404": + $ref: "#/components/responses/UnauthorizedOrNotFoundError" tags: [legacy] /review/{datasetId}/{vmId}/{runId}: get: @@ -701,10 +690,10 @@ paths: security: - LegacyApiKeyAuth: [] responses: - '200': + "200": description: TODO - 
'404': - $ref: "#/components/responses/UnauthorizedOrNotFoundError" + "404": + $ref: "#/components/responses/UnauthorizedOrNotFoundError" tags: [legacy] /registration/add_registration/{vmId}/{taskId}: parameters: @@ -720,7 +709,7 @@ paths: required: true schema: type: string - post: # FIXME: why is this endpoint not protected? + post: # FIXME: why is this endpoint not protected? summary: Deprecated, use a POST on `/v1/tasks/{taskId}/registrations` instead description: TODO deprecated: true @@ -757,7 +746,7 @@ paths: questions: type: string responses: - '200': + "200": description: TODO tags: [legacy] /submissions-for-task/{taskId}/{userId}/{submissionType}: @@ -791,7 +780,7 @@ paths: security: - LegacyApiKeyAuth: [] responses: - '200': + "200": description: TODO tags: [legacy] /tirex-components: @@ -800,7 +789,7 @@ paths: deprecated: true security: [] responses: - '200': + "200": description: TODO tags: [legacy] /tirex-snippet: @@ -809,7 +798,7 @@ paths: deprecated: true security: [] responses: - '200': + "200": description: TODO tags: [legacy] /snippets-for-tirex-components: @@ -818,7 +807,7 @@ paths: deprecated: true security: [] responses: - '200': + "200": description: TODO tags: [legacy, "dead?"] /re-ranking-datasets/{taskId}: @@ -836,7 +825,7 @@ paths: deprecated: true security: [] responses: - '200': + "200": description: TODO content: application/json: @@ -887,7 +876,7 @@ paths: security: - LegacyApiKeyAuth: [] responses: - '200': + "200": description: TODO tags: [legacy] /add_software_submission_git_repository/{taskId}/{userId}: @@ -898,7 +887,7 @@ paths: security: - LegacyApiKeyAuth: [] responses: - '200': + "200": description: TODO tags: [legacy] /get_software_submission_git_repository/{taskId}/{vmId}: @@ -909,7 +898,7 @@ paths: security: - LegacyApiKeyAuth: [] responses: - '200': + "200": description: TODO tags: [legacy] /import-submission/{taskId}/{vmId}/{submissionType}/{softwareId}: @@ -921,7 +910,7 @@ paths: security: - LegacyApiKeyAuth: [] responses: - '200': + "200": description: TODO tags: [legacy] /diffir/{taskId}/{topk}/{runId1}/{runId2}: @@ -932,7 +921,7 @@ paths: security: - LegacyApiKeyAuth: [] responses: - '200': + "200": description: TODO tags: [legacy] /serp/{taskId}/user/{vmId}/dataset/{datasetId}/{topk}/{runId}: @@ -943,26 +932,44 @@ paths: security: - LegacyApiKeyAuth: [] responses: - '200': + "200": description: TODO tags: [legacy] - - # V1 API /health: get: + summary: Check if TIRA is up and running security: [] responses: - '200': + "204": + description: Empty 2xx response to signal that the REST-API is being served. + tags: [v1] + /info: + get: + summary: General information about the running server + security: [] + responses: + "200": description: TODO + content: + application/json: + schema: + type: object + properties: + version: + type: string + description: The version of the TIRA server + restApiVersion: + type: string + description: The version of the REST-API that is served. 
tags: [v1] /v1: {} /v1/datasets: get: summary: Returns a (optionally filtered) list of all datasets parameters: - - name: filter # https://jsonapi.org/recommendations/#filtering + - name: filter # https://jsonapi.org/recommendations/#filtering in: query style: deepObject schema: @@ -970,18 +977,18 @@ paths: properties: taskId: type: string - - $ref: '#/components/parameters/cursor' - - $ref: '#/components/parameters/limit' + - $ref: "#/components/parameters/cursor" + - $ref: "#/components/parameters/limit" security: - {} - bearerAuth: [] responses: - '200': + "200": description: TODO - '204': - $ref: "#/components/responses/PaginationEmpty" - '400': - $ref: "#/components/responses/PaginationError" + "204": + $ref: "#/components/responses/PaginationEmpty" + "400": + $ref: "#/components/responses/PaginationError" tags: [v1] post: summary: Creates a new dataset and returns it @@ -1007,10 +1014,10 @@ paths: irDatasetsName: type: string responses: - '201': + "201": description: TODO - '401': - $ref: "#/components/responses/UnauthorizedError" + "401": + $ref: "#/components/responses/UnauthorizedError" tags: [v1] /v1/datasets/{datasetId}: parameters: @@ -1026,14 +1033,14 @@ paths: - {} - bearerAuth: [] responses: - '200': + "200": description: TODO content: application/json: schema: - $ref: '#/components/schemas/Task' - '404': - $ref: "#/components/responses/UnauthorizedOrNotFoundError" + $ref: "#/components/schemas/Task" + "404": + $ref: "#/components/responses/UnauthorizedOrNotFoundError" tags: [v1] patch: summary: Updates the specified dataset @@ -1041,35 +1048,35 @@ paths: content: application/json-patch+json: {} responses: - '204': + "204": description: TODO content: application/json: schema: - $ref: '#/components/schemas/Task' - '401': - $ref: "#/components/responses/UnauthorizedError" - '404': - $ref: "#/components/responses/UnauthorizedOrNotFoundError" + $ref: "#/components/schemas/Task" + "401": + $ref: "#/components/responses/UnauthorizedError" + "404": + $ref: "#/components/responses/UnauthorizedOrNotFoundError" tags: [v1] delete: summary: Deletes the specified dataset responses: - '204': + "204": description: TODO content: application/json: schema: - $ref: '#/components/schemas/Task' - '401': - $ref: "#/components/responses/UnauthorizedError" - '404': - $ref: "#/components/responses/UnauthorizedOrNotFoundError" + $ref: "#/components/schemas/Task" + "401": + $ref: "#/components/responses/UnauthorizedError" + "404": + $ref: "#/components/responses/UnauthorizedOrNotFoundError" tags: [v1] /v1/evaluations: get: parameters: - - name: filter # https://jsonapi.org/recommendations/#filtering + - name: filter # https://jsonapi.org/recommendations/#filtering in: query style: deepObject schema: @@ -1083,57 +1090,41 @@ paths: type: string vmId: type: string - - $ref: '#/components/parameters/cursor' - - $ref: '#/components/parameters/limit' + - $ref: "#/components/parameters/cursor" + - $ref: "#/components/parameters/limit" responses: - '200': + "200": description: TODO - '204': - $ref: "#/components/responses/PaginationEmpty" - '400': - $ref: "#/components/responses/PaginationError" - '401': - $ref: "#/components/responses/UnauthorizedError" - tags: [v1] - /v1/hosts: - get: - parameters: - - $ref: '#/components/parameters/cursor' - - $ref: '#/components/parameters/limit' - responses: - '200': - description: TODO - '204': - $ref: "#/components/responses/PaginationEmpty" - '400': - $ref: "#/components/responses/PaginationError" - '401': - $ref: "#/components/responses/UnauthorizedError" + "204": + 
$ref: "#/components/responses/PaginationEmpty" + "400": + $ref: "#/components/responses/PaginationError" + "401": + $ref: "#/components/responses/UnauthorizedError" tags: [v1] /v1/organizers: get: summary: Returns a (optionally filtered) list of all organizers parameters: - - $ref: '#/components/parameters/cursor' - - $ref: '#/components/parameters/limit' + - $ref: "#/components/parameters/cursor" + - $ref: "#/components/parameters/limit" responses: - '201': + "201": description: TODO - '204': - $ref: "#/components/responses/PaginationEmpty" - '400': - $ref: "#/components/responses/PaginationError" - '401': - $ref: "#/components/responses/UnauthorizedError" + "204": + $ref: "#/components/responses/PaginationEmpty" + "400": + $ref: "#/components/responses/PaginationError" + "401": + $ref: "#/components/responses/UnauthorizedError" tags: [v1] post: summary: Creates a new organizer and returns its URL responses: - - '200': + "200": description: TODO - '401': - $ref: "#/components/responses/UnauthorizedError" + "401": + $ref: "#/components/responses/UnauthorizedError" tags: [v1] /v1/organizers/{organizerId}: parameters: @@ -1146,12 +1137,12 @@ paths: get: summary: Returns the specified organizer responses: - '204': + "204": description: TODO - '401': - $ref: "#/components/responses/UnauthorizedError" - '404': - $ref: "#/components/responses/UnauthorizedOrNotFoundError" + "401": + $ref: "#/components/responses/UnauthorizedError" + "404": + $ref: "#/components/responses/UnauthorizedOrNotFoundError" tags: [v1] patch: summary: Updates the specified organizer @@ -1159,18 +1150,18 @@ paths: content: application/json-patch+json: {} responses: - '204': + "204": description: TODO - '401': - $ref: "#/components/responses/UnauthorizedError" + "401": + $ref: "#/components/responses/UnauthorizedError" tags: [v1] delete: summary: Deletes the specified organizer responses: - '200': + "200": description: TODO - '401': - $ref: "#/components/responses/UnauthorizedError" + "401": + $ref: "#/components/responses/UnauthorizedError" tags: [v1] /v1/runs: {} /v1/runs/{runId}: @@ -1184,7 +1175,7 @@ paths: get: summary: Returns a (optionally filtered) list of all runs parameters: - - name: filter # https://jsonapi.org/recommendations/#filtering + - name: filter # https://jsonapi.org/recommendations/#filtering in: query style: deepObject schema: @@ -1202,12 +1193,12 @@ paths: schema: type: integer responses: - '200': + "200": description: TODO - '401': - $ref: "#/components/responses/UnauthorizedError" - '404': - $ref: "#/components/responses/UnauthorizedOrNotFoundError" + "401": + $ref: "#/components/responses/UnauthorizedError" + "404": + $ref: "#/components/responses/UnauthorizedOrNotFoundError" tags: [v1] patch: summary: Updates the specified organizer @@ -1215,22 +1206,22 @@ paths: content: application/json-patch+json: {} responses: - '204': + "204": description: TODO - '401': - $ref: "#/components/responses/UnauthorizedError" - '404': - $ref: "#/components/responses/UnauthorizedOrNotFoundError" + "401": + $ref: "#/components/responses/UnauthorizedError" + "404": + $ref: "#/components/responses/UnauthorizedOrNotFoundError" tags: [v1] delete: summary: Deletes the specified organizer responses: - '200': + "200": description: TODO - '401': - $ref: "#/components/responses/UnauthorizedError" - '404': - $ref: "#/components/responses/UnauthorizedOrNotFoundError" + "401": + $ref: "#/components/responses/UnauthorizedError" + "404": + $ref: "#/components/responses/UnauthorizedOrNotFoundError" tags: [v1] 
/v1/runs/{runId}/review: parameters: @@ -1243,12 +1234,12 @@ paths: get: summary: Returns the review of the specified run. responses: - '200': + "200": description: TODO - '401': - $ref: "#/components/responses/UnauthorizedError" - '404': - $ref: "#/components/responses/UnauthorizedOrNotFoundError" + "401": + $ref: "#/components/responses/UnauthorizedError" + "404": + $ref: "#/components/responses/UnauthorizedOrNotFoundError" tags: [v1] patch: summary: Updates the specified review @@ -1256,45 +1247,45 @@ paths: content: application/json-patch+json: {} responses: - '200': + "200": description: TODO - '401': - $ref: "#/components/responses/UnauthorizedError" - '404': - $ref: "#/components/responses/UnauthorizedOrNotFoundError" + "401": + $ref: "#/components/responses/UnauthorizedError" + "404": + $ref: "#/components/responses/UnauthorizedOrNotFoundError" tags: [v1] /v1/tasks: get: summary: Returns a (optionally filtered) list of all tasks parameters: - - $ref: '#/components/parameters/cursor' - - $ref: '#/components/parameters/limit' + - $ref: "#/components/parameters/cursor" + - $ref: "#/components/parameters/limit" security: - {} - bearerAuth: [] responses: - '200': + "200": description: TODO content: application/json: schema: type: array items: - $ref: '#/components/schemas/Task' - '204': - $ref: "#/components/responses/PaginationEmpty" - '400': - $ref: "#/components/responses/PaginationError" - '404': - $ref: "#/components/responses/UnauthorizedOrNotFoundError" + $ref: "#/components/schemas/Task" + "204": + $ref: "#/components/responses/PaginationEmpty" + "400": + $ref: "#/components/responses/PaginationError" + "404": + $ref: "#/components/responses/UnauthorizedOrNotFoundError" tags: [v1] post: summary: Creates a new task and returns its URL responses: - '201': + "201": description: TODO - '401': - $ref: "#/components/responses/UnauthorizedError" + "401": + $ref: "#/components/responses/UnauthorizedError" tags: [v1] /v1/tasks/{taskId}: parameters: @@ -1310,24 +1301,24 @@ paths: - {} - bearerAuth: [] responses: - '200': + "200": description: TODO content: application/json: schema: - $ref: '#/components/schemas/Task' - '404': - $ref: "#/components/responses/UnauthorizedOrNotFoundError" + $ref: "#/components/schemas/Task" + "404": + $ref: "#/components/responses/UnauthorizedOrNotFoundError" tags: [v1] delete: summary: Deletes the specified task responses: - '204': + "204": description: TODO - '401': - $ref: "#/components/responses/UnauthorizedError" - '404': - $ref: "#/components/responses/UnauthorizedOrNotFoundError" + "401": + $ref: "#/components/responses/UnauthorizedError" + "404": + $ref: "#/components/responses/UnauthorizedOrNotFoundError" tags: [v1] /v1/tasks/{taskId}/evaluations: parameters: @@ -1353,12 +1344,12 @@ paths: runId: type: string responses: - '200': + "200": description: TODO - '401': - $ref: "#/components/responses/UnauthorizedError" - '404': - $ref: "#/components/responses/UnauthorizedOrNotFoundError" + "401": + $ref: "#/components/responses/UnauthorizedError" + "404": + $ref: "#/components/responses/UnauthorizedOrNotFoundError" tags: [v1] /v1/tasks/{taskId}/registrations: parameters: @@ -1371,19 +1362,19 @@ paths: get: summary: Returns a list of all users registered for the task referenced by `taskId` responses: - '200': + "200": description: TODO content: text/csv: {} - '401': - $ref: "#/components/responses/UnauthorizedError" - '404': - $ref: "#/components/responses/UnauthorizedOrNotFoundError" + "401": + $ref: "#/components/responses/UnauthorizedError" + "404": + 
$ref: "#/components/responses/UnauthorizedOrNotFoundError" tags: [v1] post: summary: Registers a new user for the task referenced by `taskId` responses: - '201': + "201": description: The registrations was created successfully content: application/json: @@ -1399,10 +1390,10 @@ paths: description: | The newly created registration object. This should be identical to the one returned from the URI field. - '401': - $ref: "#/components/responses/UnauthorizedError" - '404': - $ref: "#/components/responses/UnauthorizedOrNotFoundError" + "401": + $ref: "#/components/responses/UnauthorizedError" + "404": + $ref: "#/components/responses/UnauthorizedOrNotFoundError" tags: [v1] /v1/tasks/{taskId}/submissions: parameters: @@ -1437,18 +1428,18 @@ paths: - {} - bearerAuth: [] responses: - '200': + "200": description: TODO content: application/json: schema: type: array items: - $ref: '#/components/schemas/Submission' - '204': + $ref: "#/components/schemas/Submission" + "204": description: The request was successful but no submissions satisfy the filter criteria for the given task. - '404': - $ref: "#/components/responses/UnauthorizedOrNotFoundError" + "404": + $ref: "#/components/responses/UnauthorizedOrNotFoundError" tags: [v1] post: summary: Submits a new submission for the authenticated user @@ -1468,7 +1459,7 @@ paths: enum: - create submission: - $ref: '#/components/schemas/Submission' + $ref: "#/components/schemas/Submission" - type: object properties: action: @@ -1477,15 +1468,15 @@ paths: - import responses: - '201': + "201": description: The submission was successfully added. content: application/json: schema: - $ref: '#/components/schemas/Submission' - '202': + $ref: "#/components/schemas/Submission" + "202": description: The submission was successfully enqueued and will be processed later. It may not be successful. - '401': + "401": $ref: "#/components/responses/UnauthorizedError" tags: [v1] /v1/tirex: {} @@ -1494,7 +1485,7 @@ paths: summary: Returns a description of all tirex components security: [] responses: - '200': + "200": description: | The request was fulfilled successfully and the response contains a serialized list of all the components installed into this instance of TIREx. @@ -1514,14 +1505,14 @@ paths: summary: Returns a snippet demonstrating the usage of the specified TIREx component security: [] responses: - '200': + "200": description: | The request was fulfilled successfully and the response contains a plain-text snippet demonstrating the usage of the TIREx component uniquely identified by the query parameter `componentId`. content: text/plain: {} - '404': - $ref: "#/components/responses/UnauthorizedOrNotFoundError" + "404": + $ref: "#/components/responses/UnauthorizedOrNotFoundError" tags: [v1] /v1/tools: get: @@ -1530,11 +1521,11 @@ paths: - {} - bearerAuth: [] responses: - '200': + "200": description: | The request was fulfilled successfully and the response contains a list of the installed tools and a description of each. - '204': + "204": description: | The request was fulfilled successfully but the response is empty since there are no tools installed. tags: [v1] @@ -1545,17 +1536,17 @@ paths: - {} - bearerAuth: [] responses: - '200': + "200": description: The tool was invoked successfully and its response is attached. 
tags: [v1] /v1/user: get: summary: The user associated with the authenticated state responses: - '200': + "200": description: TODO - '401': - $ref: "#/components/responses/UnauthorizedError" + "401": + $ref: "#/components/responses/UnauthorizedError" tags: [v1] /v1/users: {} /v1/users/{userId}: {} @@ -1573,15 +1564,15 @@ paths: description: Returns all submissions of the specified user. security: [] responses: - '200': + "200": description: | The request was fulfilled successfully and the response contains a list of the user's submissions. content: application/json: {} - '204': + "204": description: The user was found but does not have any submissions yet. - '404': - $ref: "#/components/responses/UnauthorizedOrNotFoundError" + "404": + $ref: "#/components/responses/UnauthorizedOrNotFoundError" tags: [v1] components: @@ -1702,7 +1693,7 @@ components: url: type: string format: url - TaskLegacy: # Field names don't follow the naming guidelines (they are snake_case instead of camelCase) + TaskLegacy: # Field names don't follow the naming guidelines (they are snake_case instead of camelCase) type: object properties: task_id: @@ -1720,7 +1711,7 @@ components: format: uri year: type: string - pattern: '^\d{4}(?:-\d{4})?$' # YEAR or YEAR-YEAR + pattern: '^\d{4}(?:-\d{4})?$' # YEAR or YEAR-YEAR featured: type: boolean require_registration: @@ -1736,11 +1727,11 @@ components: type: string is_ir_task: type: boolean - irds_re_ranking_image: # TODO: is this only necessary for ir_tasks? Consider polymorphism + irds_re_ranking_image: # TODO: is this only necessary for ir_tasks? Consider polymorphism type: string - irds_re_ranking_command: # TODO: is this only necessary for ir_tasks? Consider polymorphism + irds_re_ranking_command: # TODO: is this only necessary for ir_tasks? Consider polymorphism type: string - irds_re_ranking_resource: # TODO: is this only necessary for ir_tasks? Consider polymorphism + irds_re_ranking_resource: # TODO: is this only necessary for ir_tasks? Consider polymorphism type: string dataset_count: type: integer @@ -1764,7 +1755,7 @@ components: type: integer max_file_list_chars_on_test_data_eval: type: integer - Task: # Updated version of TaskLegacy. Not yet done. TODO: complete + Task: # Updated version of TaskLegacy. Not yet done. TODO: complete type: object properties: id: @@ -1780,7 +1771,7 @@ components: format: uri year: type: string - pattern: '^\d{4}(?:-\d{4})?$' # YEAR or YEAR-YEAR + pattern: '^\d{4}(?:-\d{4})?$' # YEAR or YEAR-YEAR oneOf: - $ref: "#/components/schemas/BaseTask" - $ref: "#/components/schemas/IRTask" @@ -1819,7 +1810,7 @@ components: application/json: schema: allOf: - - $ref: '#/components/schemas/ProblemJSON' + - $ref: "#/components/schemas/ProblemJSON" - type: object properties: title: @@ -1838,14 +1829,14 @@ components: application/json: schema: allOf: - - $ref: '#/components/schemas/ProblemJSON' + - $ref: "#/components/schemas/ProblemJSON" - type: object properties: title: default: "Unauthorized" status: default: 401 - UnauthorizedOrNotFoundError: # https://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.4.2 + UnauthorizedOrNotFoundError: # https://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.4.2 description: | The resource was not found or the service does not want to disclose if the resource exists but the user does not have access to it. As per W3C this should be a 404 but can fulfill the purpose of a 403 internally. 
@@ -1853,20 +1844,20 @@ components: application/json: schema: allOf: - - $ref: '#/components/schemas/ProblemJSON' + - $ref: "#/components/schemas/ProblemJSON" - type: object properties: title: default: "Not Found" status: default: 404 - ForbiddenError: # https://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.4.4 + ForbiddenError: # https://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.4.4 description: The service refuses to fulfill the request. Authorization would not help. content: application/json: schema: allOf: - - $ref: '#/components/schemas/ProblemJSON' + - $ref: "#/components/schemas/ProblemJSON" - type: object properties: title: @@ -1878,4 +1869,4 @@ tags: - name: legacy description: The pre 1.0 REST API. It may not adhere to our REST API guidelines and will be removed in the future. - name: v1 - description: The REST API for the TIRA 1.0 release \ No newline at end of file + description: The REST API for the TIRA 1.0 release diff --git a/documentation/organizers/deployment/index.rst b/documentation/organizers/deployment/index.rst index c5493b3de..707477c8f 100644 --- a/documentation/organizers/deployment/index.rst +++ b/documentation/organizers/deployment/index.rst @@ -51,4 +51,4 @@ Deployment application frontend demo - frontend_legacy \ No newline at end of file + frontend_legacy diff --git a/frontend/.devcontainer.json b/frontend/.devcontainer.json deleted file mode 100644 index 2df3fe9f6..000000000 --- a/frontend/.devcontainer.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "image": "webis/tira:vuetify-dev-0.0.1", - "customizations": { - "vscode": { - "extensions": [] - } - } -} diff --git a/frontend/Makefile b/frontend/Makefile deleted file mode 100644 index e4ddc1741..000000000 --- a/frontend/Makefile +++ /dev/null @@ -1,28 +0,0 @@ -.PHONY: help frontend clean - - -.DEFAULT: help -help: - @echo "make tests" - @echo " run all tests (automatically done in Github Actions on each commit)" - @echo "make vite-build" - @echo " build and test the frontnend client code" - @echo "make clean" - @echo " clean the environment" - -frontend: - make vite-build - -vite-build: - @cd ../frontend \ - && yarn build - -vite-build-light: - @cd ../frontend \ - && yarn build-light - -vite-build-docker: - docker run -v ${PWD}:/app --platform linux/amd64 --rm -ti -w /app/src/tira/frontend-vuetify --entrypoint yarn webis/tira-application:basis-0.0.95 build - -vite-build-light-docker: - docker run -v ${PWD}:/app --platform linux/amd64 --rm -ti -w /app/src/tira/frontend-vuetify --entrypoint yarn webis/tira-application:basis-0.0.95 build-light diff --git a/frontend/README.md b/frontend/README.md index 136e38b33..d90cf5165 100644 --- a/frontend/README.md +++ b/frontend/README.md @@ -1,30 +1,7 @@ -# TIRA Vuetify Frontend +# TIRA Frontend -Start with `yarn dev`. +To get started, please read our developer documentation on... + - [... setting up the development environment](https://tira-io.github.io/tira/nightly/development/devenvironment.html) + - [... developing the frontend](https://tira-io.github.io/tira/nightly/development/frontend/index.html) -Go to [http://127.0.0.1:3000/#/tasks](http://127.0.0.1:3000/#/tasks) for plain and to [http://127.0.0.1:3000/index-discourse.html#/tasks](http://127.0.0.1:3000/index-discourse.html#/tasks) for discourse adjusted development. 
- -## Project setup - -``` -# yarn -yarn -``` - -### Compiles and hot-reloads for development - -``` -# yarn -yarn dev -``` - -### Compiles and minifies for production - -``` -# yarn -yarn build -``` - -### Customize configuration - -See [Configuration Reference](https://vitejs.dev/config/). +the latter link contains all you need to know to navigate the development lifecycle. \ No newline at end of file diff --git a/pipelines/src/django_tira_git/settings.py b/pipelines/src/django_tira_git/settings.py index a8a2726ff..1989fb2db 100644 --- a/pipelines/src/django_tira_git/settings.py +++ b/pipelines/src/django_tira_git/settings.py @@ -10,8 +10,9 @@ https://docs.djangoproject.com/en/3.1/ref/settings/ """ -from pathlib import Path import os +from pathlib import Path + import yaml # Build paths inside the project like this: BASE_DIR / 'subdir'. @@ -25,207 +26,238 @@ # See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! -SECRET_KEY = custom_settings.get("django_secret", 'not-so-secret') +SECRET_KEY = custom_settings.get("django_secret", "not-so-secret") # SECURITY WARNING: don't run with debug turned on in production! DEBUG = custom_settings.get("debug", True) ALLOWED_HOSTS = custom_settings.get("allowed_hosts", []) -TIRA_ROOT = Path(custom_settings.get("tira_root", BASE_DIR.parents[1] / "tira-model" / "src")) +TIRA_ROOT = Path( + custom_settings.get("tira_root", BASE_DIR.parents[1] / "tira-model" / "src") +) if not TIRA_ROOT.is_dir(): - raise FileNotFoundError(f"TIRA_ROOT must point to an existing tira model but points to {TIRA_ROOT} instead.") + raise FileNotFoundError( + f"TIRA_ROOT must point to an existing tira model but points to {TIRA_ROOT} instead." + ) DEPLOYMENT = custom_settings.get("deployment", "disraptor") -DISRAPTOR_SECRET_FILE = Path(custom_settings.get("disraptor_secret_file", "/etc/discourse/client-api-key")) +DISRAPTOR_SECRET_FILE = Path( + custom_settings.get("disraptor_secret_file", "/etc/discourse/client-api-key") +) HOST_GRPC_PORT = custom_settings.get("host_grpc_port", "50051") APPLICATION_GRPC_PORT = custom_settings.get("application_grpc_port", "50052") GRPC_HOST = custom_settings.get("grpc_host", "local") # can be local or remote -TIRA_DB_NAME = Path(TIRA_ROOT / "state") / f"{custom_settings['database'].get('name', 'tira')}.sqlite3" \ - if custom_settings['database'].get('engine', 'django.db.backends.sqlite3') == 'django.db.backends.sqlite3' \ - else custom_settings['database'].get('name', 'tira') +TIRA_DB_NAME = ( + Path(TIRA_ROOT / "state") + / f"{custom_settings['database'].get('name', 'tira')}.sqlite3" + if custom_settings["database"].get("engine", "django.db.backends.sqlite3") + == "django.db.backends.sqlite3" + else custom_settings["database"].get("name", "tira") +) TIRA_DB = { - 'ENGINE': custom_settings['database'].get('engine', 'django.db.backends.sqlite3'), - 'NAME': TIRA_DB_NAME, - 'USER': custom_settings['database'].get('user', 'tira'), - 'PASSWORD': custom_settings['database'].get('password', 'replace-with-db-password'), - 'HOST': custom_settings['database'].get('host', 'tira-mariadb'), - 'PORT': int(custom_settings['database'].get('port', 3306)), + "ENGINE": custom_settings["database"].get("engine", "django.db.backends.sqlite3"), + "NAME": TIRA_DB_NAME, + "USER": custom_settings["database"].get("user", "tira"), + "PASSWORD": custom_settings["database"].get("password", "replace-with-db-password"), + "HOST": custom_settings["database"].get("host", "tira-mariadb"), + "PORT": 
int(custom_settings["database"].get("port", 3306)), } # Application definition INSTALLED_APPS = [ - 'tira.apps.TiraConfig', - 'django.contrib.admin', - 'django.contrib.auth', - 'django.contrib.contenttypes', - 'django.contrib.sessions', - 'django.contrib.messages', - 'django.contrib.staticfiles', + "tira.apps.TiraConfig", + "django.contrib.admin", + "django.contrib.auth", + "django.contrib.contenttypes", + "django.contrib.sessions", + "django.contrib.messages", ] MIDDLEWARE = [ - 'django.middleware.security.SecurityMiddleware', - 'django.contrib.sessions.middleware.SessionMiddleware', - 'django.middleware.common.CommonMiddleware', - 'django.middleware.csrf.CsrfViewMiddleware', - 'django.contrib.auth.middleware.AuthenticationMiddleware', - 'django.contrib.messages.middleware.MessageMiddleware', - 'django.middleware.clickjacking.XFrameOptionsMiddleware', + "django.middleware.security.SecurityMiddleware", + "django.contrib.sessions.middleware.SessionMiddleware", + "django.middleware.common.CommonMiddleware", + "django.middleware.csrf.CsrfViewMiddleware", + "django.contrib.auth.middleware.AuthenticationMiddleware", + "django.contrib.messages.middleware.MessageMiddleware", + "django.middleware.clickjacking.XFrameOptionsMiddleware", ] -ROOT_URLCONF = 'django_admin.urls' +ROOT_URLCONF = "django_admin.urls" TEMPLATES = [ { - 'BACKEND': 'django.template.backends.django.DjangoTemplates', - 'DIRS': [BASE_DIR / 'templates'] - , - 'APP_DIRS': True, - 'OPTIONS': { - 'context_processors': [ - 'django.template.context_processors.debug', - 'django.template.context_processors.request', - 'django.contrib.auth.context_processors.auth', - 'django.contrib.messages.context_processors.messages', + "BACKEND": "django.template.backends.django.DjangoTemplates", + "DIRS": [BASE_DIR / "templates"], + "APP_DIRS": True, + "OPTIONS": { + "context_processors": [ + "django.template.context_processors.debug", + "django.template.context_processors.request", + "django.contrib.auth.context_processors.auth", + "django.contrib.messages.context_processors.messages", ], }, }, ] -WSGI_APPLICATION = 'django_admin.wsgi.application' +WSGI_APPLICATION = "django_admin.wsgi.application" # Database # https://docs.djangoproject.com/en/3.1/ref/settings/#databases -DATABASES = { - 'default': TIRA_DB -} +DATABASES = {"default": TIRA_DB} SESSION_ENGINE = "django.contrib.sessions.backends.cached_db" -DEFAULT_AUTO_FIELD = 'django.db.models.AutoField' +DEFAULT_AUTO_FIELD = "django.db.models.AutoField" def logger_config(log_dir: Path): return { - 'version': 1, - 'disable_existing_loggers': False, - 'formatters': { - 'verbose': { - 'format': '{levelname} {asctime} {module} {process:d} {thread:d} {message}', - 'style': '{', + "version": 1, + "disable_existing_loggers": False, + "formatters": { + "verbose": { + "format": "{levelname} {asctime} {module} {process:d} {thread:d} {message}", + "style": "{", }, - 'default': { - 'format': '{levelname} {asctime} {module}: {message}', - 'style': '{', + "default": { + "format": "{levelname} {asctime} {module}: {message}", + "style": "{", }, - 'simple': { - 'format': '{levelname} {message}', - 'style': '{', + "simple": { + "format": "{levelname} {message}", + "style": "{", }, }, - 'filters': { - 'require_debug_true': { - '()': 'django.utils.log.RequireDebugTrue', + "filters": { + "require_debug_true": { + "()": "django.utils.log.RequireDebugTrue", }, }, - 'handlers': { - 'console': { - 'level': 'DEBUG', - 'filters': ['require_debug_true'], - 'class': 'logging.StreamHandler', - 'formatter': 'default' + 
"handlers": { + "console": { + "level": "DEBUG", + "filters": ["require_debug_true"], + "class": "logging.StreamHandler", + "formatter": "default", }, - 'ceph_django_debug': { - 'level': 'DEBUG', - 'class': 'logging.FileHandler', - 'filters': ['require_debug_true'], - 'filename': log_dir / 'django-debug.log', - 'formatter': 'default' + "ceph_django_debug": { + "level": "DEBUG", + "class": "logging.FileHandler", + "filters": ["require_debug_true"], + "filename": log_dir / "django-debug.log", + "formatter": "default", }, - 'ceph_django_info': { - 'level': 'INFO', - 'class': 'logging.FileHandler', - 'filename': log_dir / 'django-info.log', - 'formatter': 'default' + "ceph_django_info": { + "level": "INFO", + "class": "logging.FileHandler", + "filename": log_dir / "django-info.log", + "formatter": "default", }, - 'ceph_django_warn': { - 'level': 'WARNING', - 'class': 'logging.FileHandler', - 'filename': log_dir / 'django-warning.log', - 'formatter': 'default' + "ceph_django_warn": { + "level": "WARNING", + "class": "logging.FileHandler", + "filename": log_dir / "django-warning.log", + "formatter": "default", }, - 'ceph_tira_debug': { - 'level': 'DEBUG', - 'class': 'logging.FileHandler', - 'filters': ['require_debug_true'], - 'filename': log_dir / 'tira-debug.log', - 'formatter': 'default' + "ceph_tira_debug": { + "level": "DEBUG", + "class": "logging.FileHandler", + "filters": ["require_debug_true"], + "filename": log_dir / "tira-debug.log", + "formatter": "default", }, - 'ceph_tira_info': { - 'level': 'INFO', - 'class': 'logging.FileHandler', - 'filename': log_dir / 'tira-info.log', - 'formatter': 'default' + "ceph_tira_info": { + "level": "INFO", + "class": "logging.FileHandler", + "filename": log_dir / "tira-info.log", + "formatter": "default", }, - 'ceph_tira_warn': { - 'level': 'WARNING', - 'class': 'logging.FileHandler', - 'filename': log_dir / 'tira-warning.log', - 'formatter': 'default' + "ceph_tira_warn": { + "level": "WARNING", + "class": "logging.FileHandler", + "filename": log_dir / "tira-warning.log", + "formatter": "default", }, - 'ceph_tira_db': { - 'level': 'INFO', - 'class': 'logging.FileHandler', - 'filename': log_dir / 'tira-db.log', - 'formatter': 'default' + "ceph_tira_db": { + "level": "INFO", + "class": "logging.FileHandler", + "filename": log_dir / "tira-db.log", + "formatter": "default", }, - 'ceph_grpc_debug': { - 'level': 'DEBUG', - 'class': 'logging.FileHandler', - 'filters': ['require_debug_true'], - 'filename': log_dir / 'grpc-debug.log', - 'formatter': 'default' + "ceph_grpc_debug": { + "level": "DEBUG", + "class": "logging.FileHandler", + "filters": ["require_debug_true"], + "filename": log_dir / "grpc-debug.log", + "formatter": "default", }, - 'ceph_grpc_info': { - 'level': 'INFO', - 'class': 'logging.FileHandler', - 'filename': log_dir / 'grpc-info.log', - 'formatter': 'default' + "ceph_grpc_info": { + "level": "INFO", + "class": "logging.FileHandler", + "filename": log_dir / "grpc-info.log", + "formatter": "default", }, - 'ceph_grpc_warn': { - 'level': 'WARNING', - 'class': 'logging.FileHandler', - 'filename': log_dir / 'grpc-warning.log', - 'formatter': 'default' + "ceph_grpc_warn": { + "level": "WARNING", + "class": "logging.FileHandler", + "filename": log_dir / "grpc-warning.log", + "formatter": "default", }, }, - 'loggers': { - 'django': { - 'handlers': ['console', 'ceph_django_debug', 'ceph_django_warn', 'ceph_django_info'], - 'propagate': True, + "loggers": { + "django": { + "handlers": [ + "console", + "ceph_django_debug", + "ceph_django_warn", + 
"ceph_django_info", + ], + "propagate": True, }, - 'django.requests': { - 'handlers': ['console', 'ceph_django_debug', 'ceph_django_warn', 'ceph_django_info'], - 'propagate': True, + "django.requests": { + "handlers": [ + "console", + "ceph_django_debug", + "ceph_django_warn", + "ceph_django_info", + ], + "propagate": True, }, - 'django.server': { - 'handlers': ['console', 'ceph_django_debug', 'ceph_django_warn', 'ceph_django_info'], - 'propagate': True, + "django.server": { + "handlers": [ + "console", + "ceph_django_debug", + "ceph_django_warn", + "ceph_django_info", + ], + "propagate": True, }, - 'tira': { - 'handlers': ['console', 'ceph_tira_debug', 'ceph_tira_warn', 'ceph_tira_info'], - 'propagate': True, + "tira": { + "handlers": [ + "console", + "ceph_tira_debug", + "ceph_tira_warn", + "ceph_tira_info", + ], + "propagate": True, }, - 'tira_db': { - 'handlers': ['console', 'ceph_tira_db'], - 'propagate': True, + "tira_db": { + "handlers": ["console", "ceph_tira_db"], + "propagate": True, }, - 'grpc_server': { - 'handlers': ['console', 'ceph_grpc_debug', 'ceph_grpc_warn', 'ceph_grpc_info'], - 'propagate': True, + "grpc_server": { + "handlers": [ + "console", + "ceph_grpc_debug", + "ceph_grpc_warn", + "ceph_grpc_info", + ], + "propagate": True, }, - } + }, } @@ -251,40 +283,28 @@ def logger_config(log_dir: Path): AUTH_PASSWORD_VALIDATORS = [ { - 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', + "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator", }, { - 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', + "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator", }, { - 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', + "NAME": "django.contrib.auth.password_validation.CommonPasswordValidator", }, { - 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', + "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator", }, ] # Internationalization # https://docs.djangoproject.com/en/3.1/topics/i18n/ -LANGUAGE_CODE = 'en-us' +LANGUAGE_CODE = "en-us" -TIME_ZONE = 'Europe/Berlin' +TIME_ZONE = "Europe/Berlin" USE_I18N = True USE_L10N = True USE_TZ = True - -# Static files (CSS, JavaScript, Images) -# https://docs.djangoproject.com/en/3.1/howto/static-files/ - -STATIC_URL = '/public/' - -STATICFILES_DIRS = [ - BASE_DIR / "static/", - BASE_DIR / "tira/static/" -] - -STATIC_ROOT = "/var/www/public" diff --git a/python-client/.devcontainer.json b/python-client/.devcontainer.json deleted file mode 100644 index 9eee2fe84..000000000 --- a/python-client/.devcontainer.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "image": "webis/tira:python-client-dev-0.0.4", - "customizations": { - "vscode": { - "extensions": ["ms-python.python", "ms-python.vscode-pylance", "ms-toolsai.jupyter"] - } - }, - "runArgs": ["--privileged"], - "mounts": ["source=/var/run/docker.sock,target=/var/run/docker.sock,type=bind"] -} diff --git a/python-client/.gitignore b/python-client/.gitignore index b44851b1c..8ed413962 100644 --- a/python-client/.gitignore +++ b/python-client/.gitignore @@ -1,4 +1,3 @@ build/ -.vscode/ .devcontainer/ \ No newline at end of file diff --git a/python-client/.vscode/settings.json b/python-client/.vscode/settings.json new file mode 100644 index 000000000..9b388533a --- /dev/null +++ b/python-client/.vscode/settings.json @@ -0,0 +1,7 @@ +{ + "python.testing.pytestArgs": [ + "tests" + ], + "python.testing.unittestEnabled": false, + 
"python.testing.pytestEnabled": true +} \ No newline at end of file diff --git a/python-client/Dockerfile.dev b/python-client/Dockerfile.dev deleted file mode 100644 index e1574d031..000000000 --- a/python-client/Dockerfile.dev +++ /dev/null @@ -1,51 +0,0 @@ -#docker build -t webis/tira:python-client-dev-0.0.5 -f Dockerfile.dev . -FROM ubuntu:latest - -ENV TZ=Europe/Berlin -RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone - -RUN apt-get update \ - && apt-get install -y python3 python3-pip build-essential openjdk-11-jdk \ - && apt-get install -y sudo git locales - -RUN echo "en_US.UTF-8 UTF-8" | sudo tee -a /etc/locale.gen && locale-gen - -RUN apt-get install -y curl \ - && curl -fsSL https://download.docker.com/linux/ubuntu/gpg | gpg --dearmor -o /etc/apt/keyrings/docker.gpg \ - && chmod a+r /etc/apt/keyrings/docker.gpg \ - && echo \ - "deb [arch="$(dpkg --print-architecture)" signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/ubuntu \ - "$(. /etc/os-release && echo "$VERSION_CODENAME")" stable" | \ - tee /etc/apt/sources.list.d/docker.list > /dev/null \ - && apt-get update -y \ - && apt-get install -y docker-ce-cli - -# Change root Password to 1234 -RUN echo 'root:1234' | chpasswd - -RUN pip3 install twine coverage-badge python-terrier coverage -RUN pip3 install pytest docker -# Install umlet -RUN mkdir -p /usr/share/umlet \ - && cd /usr/share/umlet \ - && wget https://www.umlet.com/download/umlet_15_1/umlet-standalone-15.1.zip -O download.zip \ - && cd /usr/share/umlet \ - && unzip download.zip -# Install sphinx dependencies -## TODO: use "pip install .[test,dev,docs]" from setup.cfg ; not done for now since this Docker file should be -## superseded anyway -RUN pip3 install sphinx furo myst-parser sphinx-design sphinxcontrib-plantuml sphinxcontrib-umlet -RUN apt-get update && apt-get install -y plantuml -RUN pip3 install tira approvaltests \ - && cd /tmp \ - && python3 -c 'from tira.third_party_integrations import ensure_pyterrier_is_loaded; ensure_pyterrier_is_loaded();' \ - && pip3 uninstall -y tira - -# Create new user: "dev" with password "1234" and change to that user -RUN useradd -ms /bin/bash dev \ - && echo 'dev:1234' | chpasswd \ - && usermod -aG sudo dev \ - && groupadd -g 973 docker \ - && usermod -aG docker dev -USER dev - diff --git a/python-client/setup.cfg b/python-client/setup.cfg index 861a49cf6..94af7a622 100644 --- a/python-client/setup.cfg +++ b/python-client/setup.cfg @@ -40,7 +40,7 @@ test = pytest-cov>=5.0,==5.* approvaltests dev = - python-terrier + python-terrier==0.10.* ir-datasets [options.entry_points] diff --git a/python-client/tira/io_utils.py b/python-client/tira/io_utils.py index 6ce4025ac..3be46d55b 100644 --- a/python-client/tira/io_utils.py +++ b/python-client/tira/io_utils.py @@ -152,15 +152,6 @@ def _ln_huggingface_model_mounts(models: str) -> str: return "; ".join(ret + [f'echo "mounted {len(models)} models"']) -def _default_hf_home_in_tira_host() -> str: - """Returns the location of the hf home on the tira hosts that are mounted read-only into the pods. - - Returns: - str: the HF_HOME on a tira host. - """ - return "/mnt/ceph/tira/data/publicly-shared-datasets/huggingface/" - - def all_lines_to_pandas(input_file: Union[str, Iterable[str]], load_default_text: bool) -> pd.DataFrame: """ .. 
todo:: add documentation diff --git a/python-client/tira/tira_run.py b/python-client/tira/tira_run.py index fde0776a8..82d11d2f4 100755 --- a/python-client/tira/tira_run.py +++ b/python-client/tira/tira_run.py @@ -373,7 +373,7 @@ def main(args=None): print(f"Ensure that the input run {args.input_run} is available.") args.input_run = tira.get_run_output(args.input_run, dataset, True) print("Done: input run is available locally.") - if args.input_run and not isinstance(args.input_run, list) and len(args.input_run) > 0: + elif args.input_run and not isinstance(args.input_run, list) and len(args.input_run) > 0: temp_dir = "/tmp/" + tempfile.TemporaryDirectory().name os.makedirs(temp_dir, exist_ok=True) for num, input_run in zip(range(len(args.input_run)), args.input_run): @@ -381,7 +381,7 @@ def main(args=None): input_run = tira.get_run_output(input_run, dataset, True) shutil.copytree(input_run, temp_dir + "/" + str(1 + num)) args.input_run = temp_dir - if args.input_run_directory and "none" != args.input_run_directory.lower(): + elif args.input_run_directory and "none" != args.input_run_directory.lower(): args.input_run = os.path.abspath(args.input_run_directory) if args.evaluate: diff --git a/tira.code-workspace b/tira.code-workspace new file mode 100644 index 000000000..837dd66fb --- /dev/null +++ b/tira.code-workspace @@ -0,0 +1,34 @@ +{ + "folders": [ + { + "name": "Backend", + "path": "application" + }, + { + "name": "Frontend", + "path": "frontend" + }, + { + "name": "Python API", + "path": "python-client" + }, + { + "name": "Documentation", + "path": "documentation" + }, + { + "name": "Root", + "path": "." + }, + ], + "settings": { + "files.exclude": { + "**/__pycache__": true, + "**/.mypy_cache": true, + "**/.pytest_cache": true, + "**/*.egg-info": true, + "**/node_modules": true, + "application/.data-dumps": true + }, + } +} \ No newline at end of file
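
For anyone who wants to try the new multi-root setup introduced by `tira.code-workspace` above, a minimal sketch of opening it locally (assuming the VS Code `code` command-line launcher is installed; only the workspace file itself comes from this change, everything else here is an assumption):

```sh
# Open the multi-root workspace added by this change. It groups the
# application, frontend, python-client and documentation directories under the
# names Backend, Frontend, Python API and Documentation, and applies the shared
# files.exclude settings to all of them.
code tira.code-workspace
```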